marinofaggiana committed 5 years ago
commit 7873fa1230
100 changed files with 23,719 additions and 0 deletions
  1. BIN  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/FirebaseMLCommon
  2. +32 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRLocalModel.h
  3. +42 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRModelDownloadConditions.h
  4. +70 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRModelManager.h
  5. +47 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRRemoteModel.h
  6. +4 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FirebaseMLCommon.h
  7. +14 -0  Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Modules/module.modulemap
  8. BIN  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/FirebaseMLVision
  9. +191 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVision.h
  10. +676 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcode.h
  11. +40 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcodeDetector.h
  12. +101 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcodeDetectorOptions.h
  13. +41 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudDetectorOptions.h
  14. +29 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudDocumentTextRecognizerOptions.h
  15. +30 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudImageLabelerOptions.h
  16. +49 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudLandmark.h
  17. +40 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudLandmarkDetector.h
  18. +49 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudTextRecognizerOptions.h
  19. +30 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentText.h
  20. +92 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextBlock.h
  21. +55 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextParagraph.h
  22. +42 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextRecognizer.h
  23. +49 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextSymbol.h
  24. +55 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextWord.h
  25. +106 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFace.h
  26. +77 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceContour.h
  27. +55 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceDetector.h
  28. +124 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceDetectorOptions.h
  29. +66 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceLandmark.h
  30. +53 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImage.h
  31. +39 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageLabel.h
  32. +55 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageLabeler.h
  33. +59 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageMetadata.h
  34. +41 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionLatitudeLongitude.h
  35. +25 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionOnDeviceImageLabelerOptions.h
  36. +35 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionPoint.h
  37. +30 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionText.h
  38. +57 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextBlock.h
  39. +51 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextElement.h
  40. +57 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextLine.h
  41. +60 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizedBreak.h
  42. +24 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizedLanguage.h
  43. +61 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizer.h
  44. +35 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FirebaseMLVision.h
  45. +18 -0  Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Modules/module.modulemap
  46. BIN  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/GTMSessionFetcher
  47. +52 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMGatherInputStream.h
  48. +148 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMMIMEDocument.h
  49. +49 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMReadMonitorInputStream.h
  50. +1305 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcher.h
  51. +112 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcherLogging.h
  52. +193 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcherService.h
  53. +166 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionUploadFetcher.h
  54. +6 -0  Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Modules/module.modulemap
  55. BIN  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/GoogleAPIClientForREST
  56. +29 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBase64.h
  57. +85 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBatchQuery.h
  58. +78 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBatchResult.h
  59. +115 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDateTime.h
  60. +109 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDefines.h
  61. +83 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDuration.h
  62. +116 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRErrorObject.h
  63. +34 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRFramework.h
  64. +317 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRObject.h
  65. +253 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRQuery.h
  66. +73 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRRuntimeCommon.h
  67. +879 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRService.h
  68. +48 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRURITemplate.h
  69. +124 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRUploadParameters.h
  70. +52 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRUtilities.h
  71. +15 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVision.h
  72. +11155 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionObjects.h
  73. +1235 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionQuery.h
  74. +88 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionService.h
  75. +5 -0  Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Modules/module.modulemap
  76. BIN  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/GoogleMobileVision
  77. +42 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVDetector.h
  78. +461 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVDetectorConstants.h
  79. +874 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVFeature.h
  80. +58 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVUtility.h
  81. +4 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GoogleMobileVision.h
  82. +16 -0  Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Modules/module.modulemap
  83. BIN  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/GoogleToolboxForMac
  84. +100 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDebugSelectorValidation.h
  85. +44 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDebugThreadValidation.h
  86. +375 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDefines.h
  87. +79 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMLocalizedString.h
  88. +508 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMLogger.h
  89. +69 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMMethodCheck.h
  90. +199 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSData+zlib.h
  91. +40 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSDictionary+URLArguments.h
  92. +45 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSString+URLArguments.h
  93. +112 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMStringEncoding.h
  94. +71 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMTypeCasting.h
  95. +73 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMURLBuilder.h
  96. +6 -0  Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Modules/module.modulemap
  97. +183 -0  Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Any.pbobjc.h
  98. +311 -0  Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Api.pbobjc.h
  99. +145 -0  Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Duration.pbobjc.h
  100. +74 -0  Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Empty.pbobjc.h

BIN
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/FirebaseMLCommon


+ 32 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRLocalModel.h

@@ -0,0 +1,32 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** A model stored locally on the device. */
+NS_SWIFT_NAME(LocalModel)
+@interface FIRLocalModel : NSObject
+
+/** The model name. */
+@property(nonatomic, copy, readonly) NSString *name;
+
+/** An absolute path to the model file stored locally on the device. */
+@property(nonatomic, copy, readonly) NSString *path;
+
+/**
+ * Creates an instance of `LocalModel` with the given name and file path.
+ *
+ * @param name The name of the local model. Within the same Firebase app, all local models should
+ *     have distinct names.
+ * @param path An absolute path to the model file stored locally on the device.
+ * @return A new `LocalModel` instance.
+ */
+- (instancetype)initWithName:(NSString *)name
+                        path:(NSString *)path NS_DESIGNATED_INITIALIZER
+    NS_SWIFT_NAME(init(name:path:));
+
+/** Unavailable. */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
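
As a usage sketch against the header above (the resource name "my_model" and the variable names are hypothetical), a local model simply wraps a file shipped with the app:

@import FirebaseMLCommon;

// Hypothetical bundled model; any absolute on-device path works.
NSString *modelPath = [[NSBundle mainBundle] pathForResource:@"my_model"
                                                      ofType:@"tflite"];
FIRLocalModel *localModel =
    [[FIRLocalModel alloc] initWithName:@"my_local_model" path:modelPath];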

+ 42 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRModelDownloadConditions.h

@@ -0,0 +1,42 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Configurations for model downloading conditions. */
+NS_SWIFT_NAME(ModelDownloadConditions)
+@interface FIRModelDownloadConditions : NSObject<NSCopying>
+
+/**
+ * Indicates whether download requests should be made over a cellular network. The default is `YES`.
+ */
+@property(nonatomic, readonly) BOOL allowsCellularAccess;
+
+/**
+ * Indicates whether the model can be downloaded while the app is in the background. The default is
+ * `NO`.
+ */
+@property(nonatomic, readonly) BOOL allowsBackgroundDownloading;
+
+/**
+ * Creates an instance of `ModelDownloadConditions` with the given conditions.
+ *
+ * @param allowsCellularAccess Whether download requests should be made over a cellular network.
+ * @param allowsBackgroundDownloading Whether the model can be downloaded while the app is in the
+ *     background.
+ * @return A new `ModelDownloadConditions` instance.
+ */
+- (instancetype)initWithAllowsCellularAccess:(BOOL)allowsCellularAccess
+                 allowsBackgroundDownloading:(BOOL)allowsBackgroundDownloading
+    NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Creates an instance of `ModelDownloadConditions` with the default conditions. The default values
+ * are specified in the documentation for each instance property.
+ *
+ * @return A new `ModelDownloadConditions` instance.
+ */
+- (instancetype)init;
+
+@end
+
+NS_ASSUME_NONNULL_END
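
A minimal sketch of both initializers; the explicit configuration below allows cellular but not background downloads:

// Explicit conditions: allow cellular downloads, disallow background downloading.
FIRModelDownloadConditions *conditions =
    [[FIRModelDownloadConditions alloc] initWithAllowsCellularAccess:YES
                                         allowsBackgroundDownloading:NO];

// Default conditions: cellular allowed (YES), background downloading disallowed (NO).
FIRModelDownloadConditions *defaults = [[FIRModelDownloadConditions alloc] init];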

+ 70 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRModelManager.h

@@ -0,0 +1,70 @@
+#import <Foundation/Foundation.h>
+
+@class FIRLocalModel;
+@class FIRRemoteModel;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Manages models that are used by ML Kit features. */
+NS_SWIFT_NAME(ModelManager)
+@interface FIRModelManager : NSObject
+
+/**
+ * Returns the `ModelManager` instance for the default Firebase app. The default Firebase app
+ * instance must be configured before calling this method; otherwise, an `FIRAppNotConfigured`
+ * exception is raised. Models hosted in non-default Firebase apps are currently not supported.
+ * The returned model manager is thread safe.
+ *
+ * @return The `ModelManager` instance for the default Firebase app.
+ */
++ (instancetype)modelManager NS_SWIFT_NAME(modelManager());
+
+/** Unavailable. Use the `modelManager()` class method. */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Registers a remote model. The model name is unique to each remote model and can only be
+ * registered once with a given instance of `ModelManager`. The model name should be the same name
+ * used when the model was uploaded to the Firebase Console. It's OK to separately register a remote
+ * and local model with the same name for a given instance of `ModelManager`.
+ *
+ * @param remoteModel The remote model to register.
+ * @return Whether the registration was successful. Returns `NO` if the given `remoteModel` is
+ *     invalid or has already been registered.
+ */
+- (BOOL)registerRemoteModel:(FIRRemoteModel *)remoteModel;
+
+/**
+ * Registers a local model. The model name is unique to each local model and can only be registered
+ * once with a given instance of `ModelManager`. It's OK to separately register a remote and local
+ * model with the same name for a given instance of `ModelManager`.
+ *
+ * @param localModel The local model to register.
+ * @return Whether the registration was successful. Returns `NO` if the given `localModel` is
+ *     invalid or has already been registered.
+ */
+- (BOOL)registerLocalModel:(FIRLocalModel *)localModel;
+
+/**
+ * Returns the registered remote model with the given name. Returns `nil` if the model was never
+ * registered with this model manager.
+ *
+ * @param name Name of the remote model.
+ * @return The remote model that was registered with the given name. Returns `nil` if the model was
+ *     never registered with this model manager.
+ */
+- (nullable FIRRemoteModel *)remoteModelWithName:(NSString *)name;
+
+/**
+ * Returns the registered local model with the given name. Returns `nil` if the model was never
+ * registered with this model manager.
+ *
+ * @param name Name of the local model.
+ * @return The local model that was registered with the given name. Returns `nil` if the model was
+ *     never registered with this model manager.
+ */
+- (nullable FIRLocalModel *)localModelWithName:(NSString *)name;
+
+@end
+
+NS_ASSUME_NONNULL_END
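
Continuing the `FIRLocalModel` sketch above, registration and lookup go through the shared manager (this assumes the default Firebase app has already been configured):

FIRModelManager *manager = [FIRModelManager modelManager];

// Returns NO if the model is invalid or a model with this name is already registered.
BOOL ok = [manager registerLocalModel:localModel];

// Later, look the model up by the name it was registered under.
FIRLocalModel *registered = [manager localModelWithName:@"my_local_model"];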

+ 47 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FIRRemoteModel.h

@@ -0,0 +1,47 @@
+#import <Foundation/Foundation.h>
+
+@class FIRModelDownloadConditions;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** A model that is stored remotely on the server and downloaded to the device. */
+NS_SWIFT_NAME(RemoteModel)
+@interface FIRRemoteModel : NSObject
+
+/** The model name. */
+@property(nonatomic, copy, readonly) NSString *name;
+
+/** Indicates whether model updates are allowed. */
+@property(nonatomic, readonly) BOOL allowsModelUpdates;
+
+/** Initial downloading conditions for the model. */
+@property(nonatomic, readonly) FIRModelDownloadConditions *initialConditions;
+
+/** Downloading conditions for subsequent calls to update the model. */
+@property(nonatomic, readonly) FIRModelDownloadConditions *updateConditions;
+
+/**
+ * Creates an instance of `RemoteModel` with the given name and download conditions.
+ *
+ * @param name The name of the remote model. Specify the name assigned to the model when it was
+ *     uploaded to the Firebase Console. Within the same Firebase app, all remote models should have
+ *     distinct names.
+ * @param allowsModelUpdates Indicates whether model updates are allowed.
+ * @param initialConditions Initial downloading conditions for the model.
+ * @param updateConditions Downloading conditions for subsequent calls to update the model. If `nil`
+ *     is passed and `allowsModelUpdates` is `YES`, the default download conditions are used via the
+ *     `ModelDownloadConditions` `init` call.
+ * @return A new `RemoteModel` instance.
+ */
+- (instancetype)initWithName:(NSString *)name
+          allowsModelUpdates:(BOOL)allowsModelUpdates
+           initialConditions:(FIRModelDownloadConditions *)initialConditions
+            updateConditions:(nullable FIRModelDownloadConditions *)updateConditions
+    NS_SWIFT_NAME(init(name:allowsModelUpdates:initialConditions:updateConditions:));
+
+/** Unavailable. */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
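
A sketch combining the pieces above; the model name "my_remote_model" is hypothetical and would have to match the name used when uploading to the Firebase Console:

FIRModelDownloadConditions *initial = [[FIRModelDownloadConditions alloc] init];
FIRRemoteModel *remoteModel =
    [[FIRRemoteModel alloc] initWithName:@"my_remote_model"
                      allowsModelUpdates:YES
                       initialConditions:initial
                        updateConditions:nil];  // nil with updates allowed falls back to defaults
[[FIRModelManager modelManager] registerRemoteModel:remoteModel];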

+ 4 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Headers/FirebaseMLCommon.h

@@ -0,0 +1,4 @@
+#import "FIRLocalModel.h"
+#import "FIRModelDownloadConditions.h"
+#import "FIRModelManager.h"
+#import "FIRRemoteModel.h"

+ 14 - 0
Libraries external/Firebase/MLVision/FirebaseMLCommon.framework/Modules/module.modulemap

@@ -0,0 +1,14 @@
+framework module FirebaseMLCommon {
+  umbrella header "FirebaseMLCommon.h"
+  export *
+  module * { export * }
+  link "sqlite3"
+  link "z"
+  link framework "CoreGraphics"
+  link framework "Foundation"
+  link framework "LocalAuthentication"
+  link framework "QuartzCore"
+  link framework "Security"
+  link framework "SystemConfiguration"
+  link framework "UIKit"
+}

BIN
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/FirebaseMLVision


+ 191 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVision.h

@@ -0,0 +1,191 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRApp;
+@class FIRVisionBarcodeDetector;
+@class FIRVisionBarcodeDetectorOptions;
+@class FIRVisionCloudDetectorOptions;
+@class FIRVisionCloudDocumentTextRecognizerOptions;
+@class FIRVisionCloudImageLabelerOptions;
+@class FIRVisionCloudLandmarkDetector;
+@class FIRVisionCloudTextRecognizerOptions;
+@class FIRVisionDocumentTextRecognizer;
+@class FIRVisionFaceDetector;
+@class FIRVisionFaceDetectorOptions;
+@class FIRVisionImageLabeler;
+@class FIRVisionOnDeviceImageLabelerOptions;
+@class FIRVisionTextRecognizer;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A Firebase service that supports vision APIs.
+ */
+NS_SWIFT_NAME(Vision)
+@interface FIRVision : NSObject
+
+/**
+ * Enables stats collection in ML Kit vision. The stats include API call counts, errors, API call
+ * durations, options, etc. No personally identifiable information is logged.
+ *
+ * <p>The setting is per `FirebaseApp`, and therefore per `Vision`, and it persists across app
+ * launches; it is erased only if the user uninstalls the app or clears all app data. The best
+ * practice is to set the flag during each initialization.
+ *
+ * <p>Logging is enabled by default; set this property to `NO` to disable it.
+ */
+@property(nonatomic, getter=isStatsCollectionEnabled) BOOL statsCollectionEnabled;
+
+/**
+ * Gets an instance of the Firebase Vision service for the default Firebase app. This method is
+ * thread safe. The default Firebase app instance must be configured before calling this method;
+ * otherwise, an `FIRAppNotConfigured` exception is raised.
+ *
+ * @return A Firebase Vision service instance, initialized with the default Firebase app.
+ */
++ (instancetype)vision NS_SWIFT_NAME(vision());
+
+/**
+ * Gets an instance of the Firebase Vision service for a custom Firebase app. This method is
+ * thread safe.
+ *
+ * @param app The custom Firebase app used for initialization. Raises an `FIRAppInvalid` exception
+ *     if `app` is `nil`.
+ * @return A Firebase Vision service instance, initialized with the custom Firebase app.
+ */
++ (instancetype)visionForApp:(FIRApp *)app NS_SWIFT_NAME(vision(app:));
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Gets a barcode detector with the given options. The returned detector is not thread safe.
+ *
+ * @param options Options containing barcode detector configuration.
+ * @return A barcode detector configured with the given options.
+ */
+- (FIRVisionBarcodeDetector *)barcodeDetectorWithOptions:(FIRVisionBarcodeDetectorOptions *)options
+    NS_SWIFT_NAME(barcodeDetector(options:));
+
+/**
+ * Gets a barcode detector with the default options. The returned detector is not thread safe.
+ *
+ * @return A barcode detector configured with the default options.
+ */
+- (FIRVisionBarcodeDetector *)barcodeDetector;
+
+/**
+ * Gets a face detector with the given options. The returned detector is not thread safe.
+ *
+ * @param options Options for configuring the face detector.
+ * @return A face detector configured with the given options.
+ */
+- (FIRVisionFaceDetector *)faceDetectorWithOptions:(FIRVisionFaceDetectorOptions *)options
+    NS_SWIFT_NAME(faceDetector(options:));
+
+/**
+ * Gets a face detector with the default options. The returned detector is not thread safe.
+ *
+ * @return A face detector configured with the default options.
+ */
+- (FIRVisionFaceDetector *)faceDetector;
+
+/**
+ * Gets an on-device image labeler with the given options. The returned image labeler is not thread
+ * safe.
+ *
+ * @param options Options for configuring the image labeler.
+ * @return An on-device image labeler configured with the given options.
+ */
+- (FIRVisionImageLabeler *)onDeviceImageLabelerWithOptions:
+    (FIRVisionOnDeviceImageLabelerOptions *)options NS_SWIFT_NAME(onDeviceImageLabeler(options:));
+
+/**
+ * Gets an on-device image labeler with the default options. The returned image labeler is not
+ * thread safe.
+ *
+ * @return An on-device image labeler configured with the default options.
+ */
+- (FIRVisionImageLabeler *)onDeviceImageLabeler;
+
+/**
+ * Gets an on-device text recognizer. The returned recognizer is not thread safe.
+ *
+ * @return A text recognizer.
+ */
+- (FIRVisionTextRecognizer *)onDeviceTextRecognizer;
+
+/**
+ * Gets a cloud text recognizer configured with the given options. The returned recognizer is not
+ * thread safe.
+ *
+ * @param options Options for configuring the cloud text recognizer.
+ * @return A text recognizer configured with the given options.
+ */
+- (FIRVisionTextRecognizer *)cloudTextRecognizerWithOptions:
+    (FIRVisionCloudTextRecognizerOptions *)options NS_SWIFT_NAME(cloudTextRecognizer(options:));
+
+/**
+ * Gets a cloud text recognizer. The returned recognizer is not thread safe.
+ *
+ * @return A text recognizer.
+ */
+- (FIRVisionTextRecognizer *)cloudTextRecognizer;
+
+/**
+ * Gets a cloud document text recognizer configured with the given options. The returned recognizer
+ * is not thread safe.
+ *
+ * @param options Options for configuring the cloud document text recognizer.
+ * @return A document text recognizer configured with the given options.
+ */
+- (FIRVisionDocumentTextRecognizer *)cloudDocumentTextRecognizerWithOptions:
+    (FIRVisionCloudDocumentTextRecognizerOptions *)options
+    NS_SWIFT_NAME(cloudDocumentTextRecognizer(options:));
+
+/**
+ * Gets a cloud document text recognizer. The returned recognizer is not thread safe.
+ *
+ * @return A document text recognizer.
+ */
+- (FIRVisionDocumentTextRecognizer *)cloudDocumentTextRecognizer;
+
+/**
+ * Gets an instance of cloud landmark detector with the given options.
+ *
+ * @param options Options for configuring the cloud landmark detector.
+ * @return A cloud landmark detector configured with the given options.
+ */
+- (FIRVisionCloudLandmarkDetector *)cloudLandmarkDetectorWithOptions:
+    (FIRVisionCloudDetectorOptions *)options
+    NS_SWIFT_NAME(cloudLandmarkDetector(options:));
+
+/**
+ * Gets an instance of cloud landmark detector with default options.
+ *
+ * @return A cloud landmark detector configured with default options.
+ */
+- (FIRVisionCloudLandmarkDetector *)cloudLandmarkDetector;
+
+/**
+ * Gets an instance of cloud image labeler with the given options.
+ *
+ * @param options Options for configuring the cloud image labeler.
+ * @return A cloud image labeler configured with the given options.
+ */
+- (FIRVisionImageLabeler *)cloudImageLabelerWithOptions:(FIRVisionCloudImageLabelerOptions *)options
+    NS_SWIFT_NAME(cloudImageLabeler(options:));
+
+/**
+ * Gets an instance of cloud image labeler with default options.
+ *
+ * @return A cloud image labeler configured with default options.
+ */
+- (FIRVisionImageLabeler *)cloudImageLabeler;
+
+@end
+
+NS_ASSUME_NONNULL_END
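
A sketch of the factory pattern this header defines; it assumes the default Firebase app was configured at launch (typically in the app delegate):

@import FirebaseMLVision;

// Vision instances come from the service, never via init.
FIRVision *vision = [FIRVision vision];

// Detectors and recognizers come from factory methods; none are thread safe.
FIRVisionBarcodeDetector *barcodeDetector = [vision barcodeDetector];
FIRVisionTextRecognizer *textRecognizer = [vision onDeviceTextRecognizer];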

+ 676 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcode.h

@@ -0,0 +1,676 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+#import "FIRVisionBarcodeDetectorOptions.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionBarcodeValueType
+ * Barcode's value format. For example, TEXT, PRODUCT, URL, etc.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionBarcodeValueType) {
+  /**
+   * Unknown barcode value type.
+   */
+  FIRVisionBarcodeValueTypeUnknown,
+  /**
+   * Barcode value type for contact info.
+   */
+  FIRVisionBarcodeValueTypeContactInfo,
+  /**
+   * Barcode value type for email addresses.
+   */
+  FIRVisionBarcodeValueTypeEmail,
+  /**
+   * Barcode value type for ISBNs.
+   */
+  FIRVisionBarcodeValueTypeISBN,
+  /**
+   * Barcode value type for phone numbers.
+   */
+  FIRVisionBarcodeValueTypePhone,
+  /**
+   * Barcode value type for product codes.
+   */
+  FIRVisionBarcodeValueTypeProduct,
+  /**
+   * Barcode value type for SMS details.
+   */
+  FIRVisionBarcodeValueTypeSMS,
+  /**
+   * Barcode value type for plain text.
+   */
+  FIRVisionBarcodeValueTypeText,
+  /**
+   * Barcode value type for URLs/bookmarks.
+   */
+  FIRVisionBarcodeValueTypeURL,
+  /**
+   * Barcode value type for Wi-Fi access point details.
+   */
+  FIRVisionBarcodeValueTypeWiFi,
+  /**
+   * Barcode value type for geographic coordinates.
+   */
+  FIRVisionBarcodeValueTypeGeographicCoordinates,
+  /**
+   * Barcode value type for calendar events.
+   */
+  FIRVisionBarcodeValueTypeCalendarEvent,
+  /**
+   * Barcode value type for driver's license data.
+   */
+  FIRVisionBarcodeValueTypeDriversLicense,
+} NS_SWIFT_NAME(VisionBarcodeValueType);
+
+/**
+ * @enum VisionBarcodeAddressType
+ * Address type.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionBarcodeAddressType) {
+  /**
+   * Barcode unknown address type.
+   */
+  FIRVisionBarcodeAddressTypeUnknown,
+  /**
+   * Barcode work address type.
+   */
+  FIRVisionBarcodeAddressTypeWork,
+  /**
+   * Barcode home address type.
+   */
+  FIRVisionBarcodeAddressTypeHome,
+} NS_SWIFT_NAME(VisionBarcodeAddressType);
+
+/**
+ * @enum VisionBarcodeEmailType
+ * Email type for VisionBarcodeEmail.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionBarcodeEmailType) {
+  /**
+   * Unknown email type.
+   */
+  FIRVisionBarcodeEmailTypeUnknown,
+  /**
+   * Barcode work email type.
+   */
+  FIRVisionBarcodeEmailTypeWork,
+  /**
+   * Barcode home email type.
+   */
+  FIRVisionBarcodeEmailTypeHome,
+} NS_SWIFT_NAME(VisionBarcodeEmailType);
+
+/**
+ * @enum VisionBarcodePhoneType
+ * Phone type for VisionBarcodePhone.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionBarcodePhoneType) {
+  /**
+   * Unknown phone type.
+   */
+  FIRVisionBarcodePhoneTypeUnknown,
+  /**
+   * Barcode work phone type.
+   */
+  FIRVisionBarcodePhoneTypeWork,
+  /**
+   * Barcode home phone type.
+   */
+  FIRVisionBarcodePhoneTypeHome,
+  /**
+   * Barcode fax phone type.
+   */
+  FIRVisionBarcodePhoneTypeFax,
+  /**
+   * Barcode mobile phone type.
+   */
+  FIRVisionBarcodePhoneTypeMobile,
+} NS_SWIFT_NAME(VisionBarcodePhoneType);
+
+/**
+ * @enum VisionBarcodeWiFiEncryptionType
+ * Wi-Fi encryption type for VisionBarcodeWiFi.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionBarcodeWiFiEncryptionType) {
+  /**
+   * Barcode unknown Wi-Fi encryption type.
+   */
+  FIRVisionBarcodeWiFiEncryptionTypeUnknown,
+  /**
+   * Barcode open Wi-Fi encryption type.
+   */
+  FIRVisionBarcodeWiFiEncryptionTypeOpen,
+  /**
+   * Barcode WPA Wi-Fi encryption type.
+   */
+  FIRVisionBarcodeWiFiEncryptionTypeWPA,
+  /**
+   * Barcode WEP Wi-Fi encryption type.
+   */
+  FIRVisionBarcodeWiFiEncryptionTypeWEP,
+} NS_SWIFT_NAME(VisionBarcodeWiFiEncryptionType);
+
+/**
+ * An address.
+ */
+NS_SWIFT_NAME(VisionBarcodeAddress)
+@interface FIRVisionBarcodeAddress : NSObject
+
+/**
+ * Formatted address, containing multiple lines when appropriate.
+ *
+ * The parsing of address formats is quite limited. Typically all address information will appear
+ * on the first address line. To handle addresses better, it is recommended to parse the raw data.
+ * The raw data is available in `FIRVisionBarcode`'s `rawValue` property.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSString *> *addressLines;
+
+/**
+ * Address type.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeAddressType type;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A calendar event extracted from a QR code.
+ */
+NS_SWIFT_NAME(VisionBarcodeCalendarEvent)
+@interface FIRVisionBarcodeCalendarEvent : NSObject
+
+/**
+ * Calendar event description.
+ */
+@property(nonatomic, readonly, nullable) NSString *eventDescription;
+
+/**
+ * Calendar event location.
+ */
+@property(nonatomic, readonly, nullable) NSString *location;
+
+/**
+ * Calendar event organizer.
+ */
+@property(nonatomic, readonly, nullable) NSString *organizer;
+
+/**
+ * Calendar event status.
+ */
+@property(nonatomic, readonly, nullable) NSString *status;
+
+/**
+ * Calendar event summary.
+ */
+@property(nonatomic, readonly, nullable) NSString *summary;
+
+/**
+ * Calendar event start date.
+ */
+@property(nonatomic, readonly, nullable) NSDate *start;
+
+/**
+ * Calendar event end date.
+ */
+@property(nonatomic, readonly, nullable) NSDate *end;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A driver license or ID card data representation.
+ *
+ * An ANSI driver license contains more fields than are represented by this class. The
+ * `FIRVisionBarcode`'s `rawValue` property can be used to access the other fields.
+ */
+NS_SWIFT_NAME(VisionBarcodeDriverLicense)
+@interface FIRVisionBarcodeDriverLicense : NSObject
+
+/**
+ * Holder's first name.
+ */
+@property(nonatomic, readonly, nullable) NSString *firstName;
+
+/**
+ * Holder's middle name.
+ */
+@property(nonatomic, readonly, nullable) NSString *middleName;
+
+/**
+ * Holder's last name.
+ */
+@property(nonatomic, readonly, nullable) NSString *lastName;
+
+/**
+ * Holder's gender. 1 is male and 2 is female.
+ */
+@property(nonatomic, readonly, nullable) NSString *gender;
+
+/**
+ * Holder's city address.
+ */
+@property(nonatomic, readonly, nullable) NSString *addressCity;
+
+/**
+ * Holder's state address.
+ */
+@property(nonatomic, readonly, nullable) NSString *addressState;
+
+/**
+ * Holder's street address.
+ */
+@property(nonatomic, readonly, nullable) NSString *addressStreet;
+
+/**
+ * Holder's address zip code.
+ */
+@property(nonatomic, readonly, nullable) NSString *addressZip;
+
+/**
+ * Holder's birthday. The date format depends on the issuing country.
+ */
+@property(nonatomic, readonly, nullable) NSString *birthDate;
+
+/**
+ * "DL" for driver licenses, "ID" for ID cards.
+ */
+@property(nonatomic, readonly, nullable) NSString *documentType;
+
+/**
+ * Driver license ID number.
+ */
+@property(nonatomic, readonly, nullable) NSString *licenseNumber;
+
+/**
+ * Driver license expiration date. The date format depends on the issuing country.
+ */
+@property(nonatomic, readonly, nullable) NSString *expiryDate;
+
+/**
+ * Issue date of the license or ID. The date format depends on the issuing country.
+ */
+@property(nonatomic, readonly, nullable) NSString *issuingDate;
+
+/**
+ * The country in which the DL/ID was issued.
+ */
+@property(nonatomic, readonly, nullable) NSString *issuingCountry;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * An email message from a 'MAILTO:' or similar QR Code type.
+ */
+NS_SWIFT_NAME(VisionBarcodeEmail)
+@interface FIRVisionBarcodeEmail : NSObject
+
+/**
+ * Email message address.
+ */
+@property(nonatomic, readonly, nullable) NSString *address;
+
+/**
+ * Email message body.
+ */
+@property(nonatomic, readonly, nullable) NSString *body;
+
+/**
+ * Email message subject.
+ */
+@property(nonatomic, readonly, nullable) NSString *subject;
+
+/**
+ * Email message type.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeEmailType type;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * GPS coordinates from a 'GEO:' or similar QR Code type data.
+ */
+NS_SWIFT_NAME(VisionBarcodeGeoPoint)
+@interface FIRVisionBarcodeGeoPoint : NSObject
+/**
+ * A location latitude.
+ */
+@property(nonatomic, readonly) double latitude;
+
+/**
+ * A location longitude.
+ */
+@property(nonatomic, readonly) double longitude;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A person's name, both formatted and as individual name components.
+ */
+NS_SWIFT_NAME(VisionBarcodePersonName)
+@interface FIRVisionBarcodePersonName : NSObject
+
+/**
+ * Properly formatted name.
+ */
+@property(nonatomic, readonly, nullable) NSString *formattedName;
+
+/**
+ * First name.
+ */
+@property(nonatomic, readonly, nullable) NSString *first;
+
+/**
+ * Last name.
+ */
+@property(nonatomic, readonly, nullable) NSString *last;
+
+/**
+ * Middle name.
+ */
+@property(nonatomic, readonly, nullable) NSString *middle;
+
+/**
+ * Name prefix.
+ */
+@property(nonatomic, readonly, nullable) NSString *prefix;
+
+/**
+ * Designates a text string to be set as the kana name in the phonebook.
+ * Used for Japanese contacts.
+ */
+@property(nonatomic, readonly, nullable) NSString *pronounciation;
+
+/**
+ * Name suffix.
+ */
+@property(nonatomic, readonly, nullable) NSString *suffix;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A phone number from a 'TEL:' or similar QR Code type.
+ */
+NS_SWIFT_NAME(VisionBarcodePhone)
+@interface FIRVisionBarcodePhone : NSObject
+
+/**
+ * Phone number.
+ */
+@property(nonatomic, readonly, nullable) NSString *number;
+
+/**
+ * Phone number type.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodePhoneType type;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * An SMS message from an 'SMS:' or similar QR Code type.
+ */
+NS_SWIFT_NAME(VisionBarcodeSMS)
+@interface FIRVisionBarcodeSMS : NSObject
+
+/**
+ * An SMS message body.
+ */
+@property(nonatomic, readonly, nullable) NSString *message;
+
+/**
+ * An SMS message phone number.
+ */
+@property(nonatomic, readonly, nullable) NSString *phoneNumber;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A URL and title from a 'MEBKM:' or similar QR Code type.
+ */
+NS_SWIFT_NAME(VisionBarcodeURLBookmark)
+@interface FIRVisionBarcodeURLBookmark : NSObject
+
+/**
+ * A URL bookmark title.
+ */
+@property(nonatomic, readonly, nullable) NSString *title;
+
+/**
+ * A URL bookmark URL.
+ */
+@property(nonatomic, readonly, nullable) NSString *url;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * Wi-Fi network parameters from a 'WIFI:' or similar QR Code type.
+ */
+NS_SWIFT_NAME(VisionBarcodeWifi)
+@interface FIRVisionBarcodeWiFi : NSObject
+
+/**
+ * A Wi-Fi access point SSID.
+ */
+@property(nonatomic, readonly, nullable) NSString *ssid;
+
+/**
+ * A Wi-Fi access point password.
+ */
+@property(nonatomic, readonly, nullable) NSString *password;
+
+/**
+ * A Wi-Fi access point encryption type.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeWiFiEncryptionType type;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A person's or organization's business card. This may come from different underlying formats
+ * including VCARD and MECARD.
+ *
+ * This object represents a simplified view of possible business cards. If you require lossless
+ * access to the information in the barcode, you should parse the raw data yourself. To access the
+ * raw data, use the `FIRVisionBarcode`'s `rawValue` property.
+ */
+NS_SWIFT_NAME(VisionBarcodeContactInfo)
+@interface FIRVisionBarcodeContactInfo : NSObject
+
+/**
+ * Person's or organization's addresses.
+ */
+@property(nonatomic, readonly, nullable) NSArray<FIRVisionBarcodeAddress *> *addresses;
+
+/**
+ * Contact emails.
+ */
+@property(nonatomic, readonly, nullable) NSArray<FIRVisionBarcodeEmail *> *emails;
+
+/**
+ * A person's name.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodePersonName *name;
+
+/**
+ * Contact phone numbers.
+ */
+@property(nonatomic, readonly, nullable) NSArray<FIRVisionBarcodePhone *> *phones;
+
+/**
+ * Contact URLs.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSString *> *urls;
+
+/**
+ * A job title.
+ */
+@property(nonatomic, readonly, nullable) NSString *jobTitle;
+
+/**
+ * A business organization.
+ */
+@property(nonatomic, readonly, nullable) NSString *organization;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ * A barcode in an image.
+ */
+NS_SWIFT_NAME(VisionBarcode)
+@interface FIRVisionBarcode : NSObject
+
+/**
+ * The rectangle that holds the discovered barcode relative to the detected image in the view
+ * coordinate system.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * A barcode value as it was encoded in the barcode. Structured values are not parsed, for example:
+ * 'MEBKM:TITLE:Google;URL:https://www.google.com;;'. Does not include the supplemental value.
+ */
+@property(nonatomic, readonly, nullable) NSString *rawValue;
+
+/**
+ * A barcode value in a user-friendly format. May omit some of the information encoded in the
+ * barcode. For example, in the case above the display value might be 'https://www.google.com'.
+ * If valueType == .text, this field will be equal to rawValue. This value may be multiline,
+ * for example, when line breaks are encoded into the original TEXT barcode value. May include
+ * the supplement value.
+ */
+@property(nonatomic, readonly, nullable) NSString *displayValue;
+
+/**
+ * A barcode format; for example, EAN_13. Note that if the format is not in the list,
+ * VisionBarcodeFormat.unknown is returned.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeFormat format;
+
+/**
+ * The four corner points of the barcode, in clockwise order starting with the top left relative
+ * to the detected image in the view coordinate system. These are CGPoints boxed in NSValues.
+ * Due to the possible perspective distortions, this is not necessarily a rectangle.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * The type of the barcode value. For example, TEXT, PRODUCT, URL, etc. Note that if the type is
+ * not in the list, .unknown is returned.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeValueType valueType;
+
+/**
+ * An email message from a 'MAILTO:' or similar QR Code type. This property is only set if
+ * valueType is .email.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeEmail *email;
+
+/**
+ * A phone number from a 'TEL:' or similar QR Code type. This property is only set if valueType
+ * is .phone.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodePhone *phone;
+
+/**
+ * An SMS message from an 'SMS:' or similar QR Code type. This property is only set if valueType
+ * is .sms.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeSMS *sms;
+
+/**
+ * A URL and title from a 'MEBKM:' or similar QR Code type. This property is only set if
+ * valueType is .url.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeURLBookmark *URL;
+
+/**
+ * Wi-Fi network parameters from a 'WIFI:' or similar QR Code type. This property is only set
+ * if valueType is .wifi.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeWiFi *wifi;
+
+/**
+ * GPS coordinates from a 'GEO:' or similar QR Code type. This property is only set if valueType
+ * is .geo.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeGeoPoint *geoPoint;
+
+/**
+ * A person's or organization's business card. For example a VCARD. This property is only set
+ * if valueType is .contactInfo.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeContactInfo *contactInfo;
+
+/**
+ * A calendar event extracted from a QR Code. This property is only set if valueType is
+ * .calendarEvent.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeCalendarEvent *calendarEvent;
+
+/**
+ * A driver license or ID card. This property is only set if valueType is .driverLicense.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionBarcodeDriverLicense *driverLicense;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
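
A sketch of consuming one detected `FIRVisionBarcode`; the `barcode` variable is assumed to come from a detector callback, as shown after the next header:

if (barcode.valueType == FIRVisionBarcodeValueTypeURL) {
  // Structured fields are only set for the matching value type.
  NSLog(@"Bookmark '%@' -> %@", barcode.URL.title, barcode.URL.url);
} else if (barcode.valueType == FIRVisionBarcodeValueTypeWiFi) {
  NSLog(@"Wi-Fi SSID: %@", barcode.wifi.ssid);
} else {
  // rawValue is the unparsed, encoded payload.
  NSLog(@"Raw value: %@", barcode.rawValue);
}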

+ 40 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcodeDetector.h

@@ -0,0 +1,40 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionBarcode;
+@class FIRVisionImage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A block containing an array of barcodes or `nil` if there's an error.
+ *
+ * @param barcodes Array of barcodes detected in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionBarcodeDetectionCallback)(NSArray<FIRVisionBarcode *> *_Nullable barcodes,
+                                                  NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionBarcodeDetectionCallback);
+
+/**
+ * A barcode detector that detects barcodes in an image.
+ */
+NS_SWIFT_NAME(VisionBarcodeDetector)
+@interface FIRVisionBarcodeDetector : NSObject
+
+/**
+ * Unavailable.  Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Detects barcodes in the given image.
+ *
+ * @param image The image to use for detecting barcodes.
+ * @param completion Handler to call back on the main queue with the detected barcodes or an error.
+ */
+- (void)detectInImage:(FIRVisionImage *)image
+           completion:(FIRVisionBarcodeDetectionCallback)completion;
+
+@end
+
+NS_ASSUME_NONNULL_END
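
A detection sketch; it assumes `FIRVisionImage`'s `initWithImage:` initializer from FIRVisionImage.h (added in this commit but not shown in this excerpt) and a `UIImage` named `photo`:

FIRVisionBarcodeDetector *detector = [[FIRVision vision] barcodeDetector];
FIRVisionImage *image = [[FIRVisionImage alloc] initWithImage:photo];

[detector detectInImage:image
             completion:^(NSArray<FIRVisionBarcode *> *_Nullable barcodes,
                          NSError *_Nullable error) {
  if (barcodes == nil) {
    NSLog(@"Barcode detection failed: %@", error);
    return;
  }
  // The completion handler is invoked on the main queue.
  for (FIRVisionBarcode *barcode in barcodes) {
    NSLog(@"%@ (format: %ld)", barcode.displayValue, (long)barcode.format);
  }
}];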

+ 101 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionBarcodeDetectorOptions.h

@@ -0,0 +1,101 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @options VisionBarcodeFormat
+ * This option specifies the barcode formats that the library should detect.
+ */
+typedef NS_OPTIONS(NSInteger, FIRVisionBarcodeFormat) {
+  /**
+   * Unknown format.
+   */
+  FIRVisionBarcodeFormatUnKnown = 0,
+  /**
+   * All formats.
+   */
+  FIRVisionBarcodeFormatAll = 0xFFFF,
+  /**
+   * Code-128 detection.
+   */
+  FIRVisionBarcodeFormatCode128 = 0x0001,
+  /**
+   * Code-39 detection.
+   */
+  FIRVisionBarcodeFormatCode39 = 0x0002,
+  /**
+   * Code-93 detection.
+   */
+  FIRVisionBarcodeFormatCode93 = 0x0004,
+  /**
+   * Codabar detection.
+   */
+  FIRVisionBarcodeFormatCodaBar = 0x0008,
+  /**
+   * Data Matrix detection.
+   */
+  FIRVisionBarcodeFormatDataMatrix = 0x0010,
+  /**
+   * EAN-13 detection.
+   */
+  FIRVisionBarcodeFormatEAN13 = 0x0020,
+  /**
+   * EAN-8 detection.
+   */
+  FIRVisionBarcodeFormatEAN8 = 0x0040,
+  /**
+   * ITF detection.
+   */
+  FIRVisionBarcodeFormatITF = 0x0080,
+  /**
+   * QR Code detection.
+   */
+  FIRVisionBarcodeFormatQRCode = 0x0100,
+  /**
+   * UPC-A detection.
+   */
+  FIRVisionBarcodeFormatUPCA = 0x0200,
+  /**
+   * UPC-E detection.
+   */
+  FIRVisionBarcodeFormatUPCE = 0x0400,
+  /**
+   * PDF-417 detection.
+   */
+  FIRVisionBarcodeFormatPDF417 = 0x0800,
+  /**
+   * Aztec code detection.
+   */
+  FIRVisionBarcodeFormatAztec = 0x1000,
+} NS_SWIFT_NAME(VisionBarcodeFormat);
+
+/**
+ * Options for specifying a Barcode detector.
+ */
+NS_SWIFT_NAME(VisionBarcodeDetectorOptions)
+@interface FIRVisionBarcodeDetectorOptions : NSObject
+
+/**
+ * The barcode formats detected in an image. Note that the detection time will increase for each
+ * additional format that is specified.
+ */
+@property(nonatomic, readonly) FIRVisionBarcodeFormat formats;
+
+/**
+ * Initializes an instance that detects all supported barcode formats.
+ *
+ * @return A new instance of Firebase barcode detector options.
+ */
+- (instancetype)init;
+
+/**
+ * Initializes an instance with the given barcode formats to look for.
+ *
+ * @param formats The barcode formats to initialize the barcode detector options.
+ * @return A new instance of Firebase barcode detector options.
+ */
+- (instancetype)initWithFormats:(FIRVisionBarcodeFormat)formats NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
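
Because `VisionBarcodeFormat` is an `NS_OPTIONS` bitmask, formats are combined with bitwise OR; restricting the set speeds up detection, per the note above:

// Detect only QR codes and EAN-13; fewer formats means faster detection.
FIRVisionBarcodeDetectorOptions *options = [[FIRVisionBarcodeDetectorOptions alloc]
    initWithFormats:FIRVisionBarcodeFormatQRCode | FIRVisionBarcodeFormatEAN13];
FIRVisionBarcodeDetector *detector =
    [[FIRVision vision] barcodeDetectorWithOptions:options];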

+ 41 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudDetectorOptions.h

@@ -0,0 +1,41 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionCloudModelType
+ * Type of model to use in vision cloud detection API.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionCloudModelType) {
+  /** Stable model. */
+  FIRVisionCloudModelTypeStable,
+  /** Latest model. */
+  FIRVisionCloudModelTypeLatest,
+} NS_SWIFT_NAME(VisionCloudModelType);
+
+/**
+ * Generic options of a vision cloud detector.
+ */
+NS_SWIFT_NAME(VisionCloudDetectorOptions)
+@interface FIRVisionCloudDetectorOptions : NSObject
+
+/**
+ * Type of model to use in vision cloud detection API.  Defaults to `.stable`.
+ */
+@property(nonatomic) FIRVisionCloudModelType modelType;
+
+/**
+ * Maximum number of results to return.  Defaults to 10.  Does not apply to
+ * `VisionCloudTextDetector` and `VisionCloudDocumentTextDetector`.
+ */
+@property(nonatomic) NSUInteger maxResults;
+
+/**
+ * API key to use for Cloud Vision API.  If `nil`, the default API key from FirebaseApp will be
+ * used.
+ */
+@property(nonatomic, copy, nullable) NSString *APIKeyOverride;
+
+@end
+
+NS_ASSUME_NONNULL_END
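
A configuration sketch; both properties are optional and fall back to the documented defaults (`.stable`, 10 results):

FIRVisionCloudDetectorOptions *options = [[FIRVisionCloudDetectorOptions alloc] init];
options.modelType = FIRVisionCloudModelTypeLatest;
options.maxResults = 5;

FIRVisionCloudLandmarkDetector *landmarkDetector =
    [[FIRVision vision] cloudLandmarkDetectorWithOptions:options];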

+ 29 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudDocumentTextRecognizerOptions.h

@@ -0,0 +1,29 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Options for a cloud document text recognizer.
+ */
+NS_SWIFT_NAME(VisionCloudDocumentTextRecognizerOptions)
+@interface FIRVisionCloudDocumentTextRecognizerOptions : NSObject
+
+/**
+ * An array of hinted language codes for cloud document text recognition. The default is `nil`. See
+ * https://cloud.google.com/vision/docs/languages for supported language codes.
+ */
+@property(nonatomic, copy, nullable) NSArray<NSString *> *languageHints;
+
+/**
+ * API key to use for Cloud Vision API. If `nil`, the default API key from FirebaseApp will be used.
+ */
+@property(nonatomic, copy, nullable) NSString *APIKeyOverride;
+
+/**
+ * Designated initializer that creates a new instance of cloud document text recognizer options.
+ */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 30 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudImageLabelerOptions.h

@@ -0,0 +1,30 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Options for a cloud image labeler. */
+NS_SWIFT_NAME(VisionCloudImageLabelerOptions)
+@interface FIRVisionCloudImageLabelerOptions : NSObject
+
+/**
+ * The confidence threshold for labels returned by the image labeler. Labels returned by the image
+ * labeler will have a confidence level greater than or equal to the given threshold. Values must
+ * be in the range [0, 1]. If unset or an invalid value is set, the default threshold of 0.5 is
+ * used. Up to 20 labels with the top confidence will be returned.
+ */
+@property(nonatomic) float confidenceThreshold;
+
+/**
+ * API key to use for Cloud Vision API. If `nil`, the default API key from FirebaseApp will be used.
+ */
+@property(nonatomic, copy, nullable) NSString *APIKeyOverride;
+
+/**
+ * Designated initializer that creates a new instance of cloud image labeler options with the
+ * default values.
+ */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
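
A threshold sketch; per the doc comment above, values outside [0, 1] fall back to the 0.5 default:

FIRVisionCloudImageLabelerOptions *options =
    [[FIRVisionCloudImageLabelerOptions alloc] init];
options.confidenceThreshold = 0.7f;  // only labels with confidence >= 0.7

FIRVisionImageLabeler *labeler =
    [[FIRVision vision] cloudImageLabelerWithOptions:options];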

+ 49 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudLandmark.h

@@ -0,0 +1,49 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionLatitudeLongitude;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Set of landmark properties identified by a vision cloud detector.
+ */
+NS_SWIFT_NAME(VisionCloudLandmark)
+@interface FIRVisionCloudLandmark : NSObject
+
+/**
+ * Opaque entity ID. Some IDs may be available in the
+ * [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, readonly, nullable) NSString *entityId;
+
+/**
+ * Textual description of the landmark.
+ */
+@property(nonatomic, copy, readonly, nullable) NSString *landmark;
+
+/**
+ * Overall confidence of the result. The value is float, in range [0, 1].
+ */
+@property(nonatomic, readonly, nullable) NSNumber *confidence;
+
+/**
+ * A rectangular image region to which this landmark belongs (in the view coordinate system).
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * The location information for the detected landmark. Multiple LocationInfo elements can be present
+ * because one location may indicate the location of the scene in the image, and another location
+ * may indicate the location of the place where the image was taken.
+ */
+@property(nonatomic, readonly, nullable) NSArray<FIRVisionLatitudeLongitude *> *locations;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 40 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudLandmarkDetector.h

@@ -0,0 +1,40 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionCloudLandmark;
+@class FIRVisionImage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A block containing an array of landmarks or `nil` if there's an error.
+ *
+ * @param landmarks Array of landmarks detected in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionCloudLandmarkDetectionCompletion)(
+    NSArray<FIRVisionCloudLandmark *> *_Nullable landmarks, NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionCloudLandmarkDetectionCompletion);
+
+/**
+ * A landmark detector that detects landmarks in an image.
+ */
+NS_SWIFT_NAME(VisionCloudLandmarkDetector)
+@interface FIRVisionCloudLandmarkDetector : NSObject
+
+/**
+ * Unavailable.  Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Detects landmarks in the given image.
+ *
+ * @param image The image to use for detecting landmarks.
+ * @param completion Handler to call back on the main queue with the detected landmarks or an error.
+ */
+- (void)detectInImage:(FIRVisionImage *)image
+           completion:(FIRVisionCloudLandmarkDetectionCompletion)completion;
+
+@end
+
+NS_ASSUME_NONNULL_END
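
A call sketch mirroring the barcode example, reusing a `FIRVisionImage` named `image` and the `landmarkDetector` configured above:

[landmarkDetector detectInImage:image
                     completion:^(NSArray<FIRVisionCloudLandmark *> *_Nullable landmarks,
                                  NSError *_Nullable error) {
  if (landmarks == nil) {
    NSLog(@"Landmark detection failed: %@", error);
    return;
  }
  for (FIRVisionCloudLandmark *landmark in landmarks) {
    NSLog(@"%@ (confidence: %@)", landmark.landmark, landmark.confidence);
  }
}];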

+ 49 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionCloudTextRecognizerOptions.h

@@ -0,0 +1,49 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionCloudTextModelType
+ * An enum of model types for cloud text recognition.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionCloudTextModelType) {
+  /**
+   * Sparse or regular text cloud model type.
+   */
+  FIRVisionCloudTextModelTypeSparse,
+  /**
+   * Dense or document text cloud model type.
+   */
+  FIRVisionCloudTextModelTypeDense,
+} NS_SWIFT_NAME(VisionCloudTextModelType);
+
+/**
+ * Options for a cloud text recognizer.
+ */
+NS_SWIFT_NAME(VisionCloudTextRecognizerOptions)
+@interface FIRVisionCloudTextRecognizerOptions : NSObject
+
+/**
+ * Model type for cloud text recognition. The default is `VisionCloudTextModelType.sparse`.
+ */
+@property(nonatomic) FIRVisionCloudTextModelType modelType;
+
+/**
+ * An array of hinted language codes for cloud text recognition. The default is `nil`. See
+ * https://cloud.google.com/vision/docs/languages for supported language codes.
+ */
+@property(nonatomic, copy, nullable) NSArray<NSString *> *languageHints;
+
+/**
+ * API key to use for Cloud Vision API. If `nil`, the default API key from FirebaseApp will be used.
+ */
+@property(nonatomic, copy, nullable) NSString *APIKeyOverride;
+
+/**
+ * Designated initializer that creates a new instance of cloud text recognizer options.
+ */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
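
A sketch selecting the dense (document) model with language hints; the hint values are illustrative:

FIRVisionCloudTextRecognizerOptions *options =
    [[FIRVisionCloudTextRecognizerOptions alloc] init];
options.modelType = FIRVisionCloudTextModelTypeDense;  // dense/document text
options.languageHints = @[ @"en", @"it" ];

FIRVisionTextRecognizer *recognizer =
    [[FIRVision vision] cloudTextRecognizerWithOptions:options];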

+ 30 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentText.h

@@ -0,0 +1,30 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionDocumentTextBlock;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Recognized document text in an image.
+ */
+NS_SWIFT_NAME(VisionDocumentText)
+@interface FIRVisionDocumentText : NSObject
+
+/**
+ * String representation of the recognized document text.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of blocks recognized in the document text.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionDocumentTextBlock *> *blocks;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 92 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextBlock.h

@@ -0,0 +1,92 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionDocumentTextParagraph;
+@class FIRVisionTextRecognizedBreak;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionDocumentTextBlockType
+ * An enum of document text block types.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionDocumentTextBlockType) {
+  /**
+   * Unknown document text block type.
+   */
+  FIRVisionDocumentTextBlockTypeUnknown,
+  /**
+   * Barcode document text block type.
+   */
+  FIRVisionDocumentTextBlockTypeBarcode,
+  /**
+   * Image document text block type.
+   */
+  FIRVisionDocumentTextBlockTypePicture,
+  /**
+   * Horizontal/vertical line box document text block type.
+   */
+  FIRVisionDocumentTextBlockTypeRuler,
+  /**
+   * Table document text block type.
+   */
+  FIRVisionDocumentTextBlockTypeTable,
+  /**
+   * Regular document text block type.
+   */
+  FIRVisionDocumentTextBlockTypeText,
+} NS_SWIFT_NAME(VisionDocumentTextBlockType);
+
+/**
+ * A document text block recognized in an image that consists of an array of paragraphs.
+ */
+NS_SWIFT_NAME(VisionDocumentTextBlock)
+@interface FIRVisionDocumentTextBlock : NSObject
+
+/**
+ * The detected block type.
+ */
+@property(nonatomic, readonly) FIRVisionDocumentTextBlockType type;
+
+/**
+ * String representation of the document text block that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of paragraphs in the block if the type is `VisionDocumentTextBlockType.text`. Otherwise,
+ * the array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionDocumentTextParagraph *> *paragraphs;
+
+/**
+ * The rectangle that contains the document text block relative to the image in the default
+ * coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * The confidence of the recognized document text block.
+ */
+@property(nonatomic, readonly) NSNumber *confidence;
+
+/**
+ * An array of recognized languages in the document text block. If no languages are recognized, the
+ * array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The recognized start or end of the document text block.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionTextRecognizedBreak *recognizedBreak;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 55 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextParagraph.h

@@ -0,0 +1,55 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionDocumentTextWord;
+@class FIRVisionTextRecognizedBreak;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A document text paragraph recognized in an image that consists of an array of words.
+ */
+NS_SWIFT_NAME(VisionDocumentTextParagraph)
+@interface FIRVisionDocumentTextParagraph : NSObject
+
+/**
+ * String representation of the document text paragraph that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of words in the document text paragraph.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionDocumentTextWord *> *words;
+
+/**
+ * The rectangle that contains the document text paragraph relative to the image in the default
+ * coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * The confidence of the recognized document text paragraph.
+ */
+@property(nonatomic, readonly) NSNumber *confidence;
+
+/**
+ * An array of recognized languages in the document text paragraph. If no languages are recognized,
+ * the array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The recognized start or end of the document text paragraph.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionTextRecognizedBreak *recognizedBreak;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 42 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextRecognizer.h

@@ -0,0 +1,42 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionDocumentText;
+@class FIRVisionImage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * The callback to invoke when the document text recognition completes.
+ *
+ * @param text Recognized document text in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionDocumentTextRecognitionCallback)(FIRVisionDocumentText *_Nullable text,
+                                                         NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionDocumentTextRecognitionCallback);
+
+/**
+ * A cloud document text recognizer that recognizes text in an image.
+ */
+NS_SWIFT_NAME(VisionDocumentTextRecognizer)
+@interface FIRVisionDocumentTextRecognizer : NSObject
+
+/**
+ * Unavailable. Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Processes the given image for cloud document text recognition.
+ *
+ * @param image The image to process for recognizing document text.
+ * @param completion Handler to call back on the main queue when document text recognition
+ *     completes.
+ */
+- (void)processImage:(FIRVisionImage *)image
+          completion:(FIRVisionDocumentTextRecognitionCallback)completion
+    NS_SWIFT_NAME(process(_:completion:));
+
+@end
+
+NS_ASSUME_NONNULL_END
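
A usage sketch tying this recognizer to the `FIRVisionDocumentText` hierarchy above, assuming the
`FIRVision` factory method `-cloudDocumentTextRecognizer` declared in FIRVision.h; `image` is a
hypothetical `UIImage`:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionDocumentTextRecognizer *recognizer = [[FIRVision vision] cloudDocumentTextRecognizer];
    FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];

    [recognizer processImage:visionImage
                  completion:^(FIRVisionDocumentText *_Nullable text, NSError *_Nullable error) {
      if (text == nil) {
        NSLog(@"Document text recognition failed: %@", error);
        return;
      }
      for (FIRVisionDocumentTextBlock *block in text.blocks) {
        // Paragraphs are only populated for blocks of type `.text`.
        for (FIRVisionDocumentTextParagraph *paragraph in block.paragraphs) {
          for (FIRVisionDocumentTextWord *word in paragraph.words) {
            NSLog(@"word: %@ frame: %@", word.text, NSStringFromCGRect(word.frame));
          }
        }
      }
    }];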

+ 49 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextSymbol.h

@@ -0,0 +1,49 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionTextRecognizedBreak;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A document text symbol recognized in an image.
+ */
+NS_SWIFT_NAME(VisionDocumentTextSymbol)
+@interface FIRVisionDocumentTextSymbol : NSObject
+
+/**
+ * String representation of the document text symbol that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * The rectangle that contains the document text symbol relative to the image in the default
+ * coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * The confidence of the recognized document text symbol.
+ */
+@property(nonatomic, readonly) NSNumber *confidence;
+
+/**
+ * An array of recognized languages in the document text symbol. If no languages are recognized, the
+ * array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The recognized start or end of the document text symbol.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionTextRecognizedBreak *recognizedBreak;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 55 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionDocumentTextWord.h

@@ -0,0 +1,55 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionDocumentTextSymbol;
+@class FIRVisionTextRecognizedBreak;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A document text word recognized in an image that consists of an array of symbols.
+ */
+NS_SWIFT_NAME(VisionDocumentTextWord)
+@interface FIRVisionDocumentTextWord : NSObject
+
+/**
+ * String representation of the document text word that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of symbols in the document text word.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionDocumentTextSymbol *> *symbols;
+
+/**
+ * The rectangle that contains the document text word relative to the image in the default
+ * coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * The confidence of the recognized document text word.
+ */
+@property(nonatomic, readonly) NSNumber *confidence;
+
+/**
+ * An array of recognized languages in the document text word. If no languages are recognized, the
+ * array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The recognized start or end of the document text word.
+ */
+@property(nonatomic, readonly, nullable) FIRVisionTextRecognizedBreak *recognizedBreak;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 106 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFace.h

@@ -0,0 +1,106 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+#import "FIRVisionFaceContour.h"
+#import "FIRVisionFaceLandmark.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A human face detected in an image.
+ */
+NS_SWIFT_NAME(VisionFace)
+@interface FIRVisionFace : NSObject
+
+/**
+ * The rectangle that holds the discovered face relative to the detected image in the view
+ * coordinate system.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * Indicates whether the face has a tracking ID.
+ */
+@property(nonatomic, readonly) BOOL hasTrackingID;
+
+/**
+ * The tracking identifier of the face.
+ */
+@property(nonatomic, readonly) NSInteger trackingID;
+
+/**
+ * Indicates whether the detector found the head y euler angle.
+ */
+@property(nonatomic, readonly) BOOL hasHeadEulerAngleY;
+
+/**
+ * Indicates the rotation of the face about the vertical axis of the image.  Positive y euler angle
+ * is when the face is turned towards the right side of the image that is being processed.
+ */
+@property(nonatomic, readonly) CGFloat headEulerAngleY;
+
+/**
+ * Indicates whether the detector found the head z euler angle.
+ */
+@property(nonatomic, readonly) BOOL hasHeadEulerAngleZ;
+
+/**
+ * Indicates the rotation of the face about the axis pointing out of the image.  Positive z euler
+ * angle is a counter-clockwise rotation within the image plane.
+ */
+@property(nonatomic, readonly) CGFloat headEulerAngleZ;
+
+/**
+ * Indicates whether a smiling probability is available.
+ */
+@property(nonatomic, readonly) BOOL hasSmilingProbability;
+
+/**
+ * Probability that the face is smiling.
+ */
+@property(nonatomic, readonly) CGFloat smilingProbability;
+
+/**
+ * Indicates whether a left eye open probability is available.
+ */
+@property(nonatomic, readonly) BOOL hasLeftEyeOpenProbability;
+
+/**
+ * Probability that the face's left eye is open.
+ */
+@property(nonatomic, readonly) CGFloat leftEyeOpenProbability;
+
+/**
+ * Indicates whether a right eye open probability is available.
+ */
+@property(nonatomic, readonly) BOOL hasRightEyeOpenProbability;
+
+/**
+ * Probability that the face's right eye is open.
+ */
+@property(nonatomic, readonly) CGFloat rightEyeOpenProbability;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Returns the landmark, if any, of the given type in this detected face.
+ *
+ * @param type The type of the facial landmark.
+ * @return The landmark of the given type in this face.  nil if there isn't one.
+ */
+- (nullable FIRVisionFaceLandmark *)landmarkOfType:(FIRFaceLandmarkType)type;
+
+/**
+ * Returns the contour, if any, of the given type in this detected face.
+ *
+ * @param type The type of the facial contour.
+ * @return The contour of the given type in this face.  nil if there isn't one.
+ */
+- (nullable FIRVisionFaceContour *)contourOfType:(FIRFaceContourType)type;
+
+@end
+
+NS_ASSUME_NONNULL_END
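
A brief sketch of reading these attributes safely; each value is only meaningful when its
corresponding `has...` flag is `YES`. `face` is a hypothetical `FIRVisionFace` delivered by a
detector callback:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    if (face.hasSmilingProbability && face.smilingProbability > 0.8) {
      NSLog(@"Face is probably smiling");
    }
    if (face.hasHeadEulerAngleZ) {
      NSLog(@"In-plane rotation: %f degrees", (double)face.headEulerAngleZ);
    }
    FIRVisionFaceLandmark *noseBase = [face landmarkOfType:FIRFaceLandmarkTypeNoseBase];
    if (noseBase != nil) {
      NSLog(@"Nose base at (%@, %@)", noseBase.position.x, noseBase.position.y);
    }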

+ 77 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceContour.h

@@ -0,0 +1,77 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionPoint;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Facial contour types.
+ */
+typedef NSString *FIRFaceContourType NS_EXTENSIBLE_STRING_ENUM NS_SWIFT_NAME(FaceContourType);
+
+/** All contour points including left and right cheeks. */
+extern FIRFaceContourType const FIRFaceContourTypeAll;
+
+/** A set of points that outline the face oval. */
+extern FIRFaceContourType const FIRFaceContourTypeFace;
+
+/** A set of points that outline the top of the left eyebrow. */
+extern FIRFaceContourType const FIRFaceContourTypeLeftEyebrowTop;
+
+/** A set of points that outline the bottom of the left eyebrow. */
+extern FIRFaceContourType const FIRFaceContourTypeLeftEyebrowBottom;
+
+/** A set of points that outline the top of the right eyebrow. */
+extern FIRFaceContourType const FIRFaceContourTypeRightEyebrowTop;
+
+/** A set of points that outline the bottom of the right eyebrow. */
+extern FIRFaceContourType const FIRFaceContourTypeRightEyebrowBottom;
+
+/** A set of points that outline the left eye. */
+extern FIRFaceContourType const FIRFaceContourTypeLeftEye;
+
+/** A set of points that outline the right eye. */
+extern FIRFaceContourType const FIRFaceContourTypeRightEye;
+
+/** A set of points that outline the top of the upper lip. */
+extern FIRFaceContourType const FIRFaceContourTypeUpperLipTop;
+
+/** A set of points that outline the bottom of the upper lip. */
+extern FIRFaceContourType const FIRFaceContourTypeUpperLipBottom;
+
+/** A set of points that outline the top of the lower lip. */
+extern FIRFaceContourType const FIRFaceContourTypeLowerLipTop;
+
+/** A set of points that outline the bottom of the lower lip. */
+extern FIRFaceContourType const FIRFaceContourTypeLowerLipBottom;
+
+/** A set of points that outline the nose bridge. */
+extern FIRFaceContourType const FIRFaceContourTypeNoseBridge;
+
+/** A set of points that outline the bottom of the nose. */
+extern FIRFaceContourType const FIRFaceContourTypeNoseBottom;
+
+/**
+ * A contour on a human face detected in an image.
+ */
+NS_SWIFT_NAME(VisionFaceContour)
+@interface FIRVisionFaceContour : NSObject
+
+/**
+ * The facial contour type.
+ */
+@property(nonatomic, readonly) FIRFaceContourType type;
+
+/**
+ * An array of 2D points that make up the facial contour.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionPoint *> *points;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 55 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceDetector.h

@@ -0,0 +1,55 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionFace;
+@class FIRVisionImage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A block containing an array of faces or `nil` if there's an error.
+ *
+ * @param faces Array of faces detected in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionFaceDetectionCallback)(NSArray<FIRVisionFace *> *_Nullable faces,
+                                               NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionFaceDetectionCallback);
+
+/**
+ * A face detector that detects faces in an image.
+ */
+NS_SWIFT_NAME(VisionFaceDetector)
+@interface FIRVisionFaceDetector : NSObject
+
+/**
+ * Unavailable. Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Processes the given image for face detection. The detection is performed asynchronously and calls
+ * back the completion handler with the detected face results or error on the main thread.
+ *
+ * @param image The vision image to use for detecting faces.
+ * @param completion Handler to call back on the main thread with faces detected or error.
+ */
+- (void)processImage:(FIRVisionImage *)image
+          completion:(FIRVisionFaceDetectionCallback)completion
+    NS_SWIFT_NAME(process(_:completion:));
+
+/**
+ * Returns detected face results in the given image or `nil` if there was an error. The detection is
+ * performed synchronously on the calling thread.
+ *
+ * @discussion It is advised to call this method off the main thread to avoid blocking the UI. As a
+ *     result, an `NSException` is raised if this method is called on the main thread.
+ * @param image The vision image to use for detecting faces.
+ * @param error An optional error parameter populated when there is an error during detection.
+ * @return Array of faces detected in the given image or `nil` if there was an error.
+ */
+- (nullable NSArray<FIRVisionFace *> *)resultsInImage:(FIRVisionImage *)image
+                                                error:(NSError **)error;
+
+@end
+
+NS_ASSUME_NONNULL_END
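
Since `-resultsInImage:error:` raises if called on the main thread, a typical pattern is to hop to
a background queue first; `detector` and `visionImage` are hypothetical instances:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
      NSError *error = nil;
      NSArray<FIRVisionFace *> *faces = [detector resultsInImage:visionImage error:&error];
      if (faces == nil) {
        NSLog(@"Face detection failed: %@", error);
        return;
      }
      NSLog(@"Detected %lu face(s)", (unsigned long)faces.count);
    });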

+ 124 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceDetectorOptions.h

@@ -0,0 +1,124 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionFaceDetectorClassificationMode
+ * Classification mode for face detection.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionFaceDetectorClassificationMode) {
+  /**
+   * Face classification mode indicating that the detector performs no classification.
+   */
+  FIRVisionFaceDetectorClassificationModeNone = 1,
+  /**
+   * Face classification mode indicating that the detector performs all classifications.
+   */
+  FIRVisionFaceDetectorClassificationModeAll,
+} NS_SWIFT_NAME(VisionFaceDetectorClassificationMode);
+
+/**
+ * @enum VisionFaceDetectorPerformanceMode
+ * Performance preference for accuracy or speed of face detection.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionFaceDetectorPerformanceMode) {
+  /**
+   * Face detection performance mode that runs faster, but may detect fewer faces and/or return
+   * results with lower accuracy.
+   */
+  FIRVisionFaceDetectorPerformanceModeFast = 1,
+  /**
+   * Face detection performance mode that runs slower, but may detect more faces and/or return
+   * results with higher accuracy.
+   */
+  FIRVisionFaceDetectorPerformanceModeAccurate,
+} NS_SWIFT_NAME(VisionFaceDetectorPerformanceMode);
+
+/**
+ * @enum VisionFaceDetectorLandmarkMode
+ * Landmark detection mode for face detection.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionFaceDetectorLandmarkMode) {
+  /**
+   * Face landmark mode indicating that the detector performs no landmark detection.
+   */
+  FIRVisionFaceDetectorLandmarkModeNone = 1,
+  /**
+   * Face landmark mode indicating that the detector performs landmark detection.
+   */
+  FIRVisionFaceDetectorLandmarkModeAll,
+} NS_SWIFT_NAME(VisionFaceDetectorLandmarkMode);
+
+/**
+ * @enum VisionFaceDetectorContourMode
+ * Contour detection mode for face detection.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionFaceDetectorContourMode) {
+  /**
+   * Face contour mode indicating that the detector performs no contour detection.
+   */
+  FIRVisionFaceDetectorContourModeNone = 1,
+  /**
+   * Face contour mode indicating that the detector performs contour detection.
+   */
+  FIRVisionFaceDetectorContourModeAll,
+} NS_SWIFT_NAME(VisionFaceDetectorContourMode);
+
+/**
+ * Options for specifying a face detector.
+ */
+NS_SWIFT_NAME(VisionFaceDetectorOptions)
+@interface FIRVisionFaceDetectorOptions : NSObject
+
+/**
+ * The face detector classification mode for characterizing attributes such as smiling. Defaults to
+ * `.none`.
+ */
+@property(nonatomic) FIRVisionFaceDetectorClassificationMode classificationMode;
+
+/**
+ * The face detector performance mode that determines the accuracy of the results and the speed of
+ * the detection. Defaults to `.fast`.
+ */
+@property(nonatomic) FIRVisionFaceDetectorPerformanceMode performanceMode;
+
+/**
+ * The face detector landmark mode that determines the type of landmark results returned by
+ * detection. Defaults to `.none`.
+ */
+@property(nonatomic) FIRVisionFaceDetectorLandmarkMode landmarkMode;
+
+/**
+ * The face detector contour mode that determines the type of contour results returned by detection.
+ * Defaults to `.none`.
+ *
+ * <p>The following detection results are returned when setting this mode to `.all`:
+ *
+ * <p>If `performanceMode` is set to `.fast`, and both `classificationMode` and `landmarkMode` are
+ * set to `.none`, then only the prominent face will be returned with detected contours.
+ *
+ * <p>If `performanceMode` is set to `.accurate`, or if `classificationMode` or `landmarkMode` is
+ * set to `.all`, then all detected faces will be returned, but only the prominent face will have
+ * detected contours.
+ */
+@property(nonatomic) FIRVisionFaceDetectorContourMode contourMode;
+
+/**
+ * The smallest desired face size. The size is expressed as a proportion of the width of the head to
+ * the image width. For example, if a value of 0.1 is specified, then the smallest face to search
+ * for is roughly 10% of the width of the image being searched. Defaults to 0.1. This option does
+ * not apply to contour detection.
+ */
+@property(nonatomic) CGFloat minFaceSize;
+
+/**
+ * Whether the face tracking feature is enabled for face detection. Defaults to NO. When
+ * `performanceMode` is set to `.fast`, and both `classificationMode` and `landmarkMode` are set
+ * to `.none`, this option will be ignored and tracking will be disabled.
+ */
+@property(nonatomic, getter=isTrackingEnabled) BOOL trackingEnabled;
+
+@end
+
+NS_ASSUME_NONNULL_END
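
A minimal configuration sketch, assuming the `FIRVision` factory method `-faceDetectorWithOptions:`
declared in FIRVision.h:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionFaceDetectorOptions *options = [[FIRVisionFaceDetectorOptions alloc] init];
    options.performanceMode = FIRVisionFaceDetectorPerformanceModeAccurate;
    options.classificationMode = FIRVisionFaceDetectorClassificationModeAll;  // smiling, eyes open
    options.landmarkMode = FIRVisionFaceDetectorLandmarkModeAll;
    options.minFaceSize = 0.2;  // skip faces narrower than ~20% of the image width

    FIRVisionFaceDetector *detector = [[FIRVision vision] faceDetectorWithOptions:options];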

+ 66 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionFaceLandmark.h

@@ -0,0 +1,66 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionPoint;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Type of all facial landmarks.
+ */
+typedef NSString *FIRFaceLandmarkType NS_EXTENSIBLE_STRING_ENUM NS_SWIFT_NAME(FaceLandmarkType);
+
+/** Center of the bottom lip. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeMouthBottom;
+
+/** Right corner of the mouth */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeMouthRight;
+
+/** Left corner of the mouth */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeMouthLeft;
+
+/** Midpoint of the left ear tip and left ear lobe. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeLeftEar;
+
+/** Midpoint of the right ear tip and right ear lobe. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeRightEar;
+
+/** Left eye. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeLeftEye;
+
+/** Right eye. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeRightEye;
+
+/** Left cheek. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeLeftCheek;
+
+/** Right cheek. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeRightCheek;
+
+/** Midpoint between the nostrils where the nose meets the face. */
+extern FIRFaceLandmarkType const FIRFaceLandmarkTypeNoseBase;
+
+/**
+ * A landmark on a human face detected in an image.
+ */
+NS_SWIFT_NAME(VisionFaceLandmark)
+@interface FIRVisionFaceLandmark : NSObject
+
+/**
+ * The type of the facial landmark.
+ */
+@property(nonatomic, readonly) FIRFaceLandmarkType type;
+
+/**
+ * 2D position of the facial landmark.
+ */
+@property(nonatomic, readonly) FIRVisionPoint *position;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 53 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImage.h

@@ -0,0 +1,53 @@
+#import <CoreMedia/CoreMedia.h>
+#import <UIKit/UIKit.h>
+
+@class FIRVisionImageMetadata;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * An image or image buffer used in vision detection, with optional metadata.
+ */
+NS_SWIFT_NAME(VisionImage)
+@interface FIRVisionImage : NSObject
+
+/**
+ * Metadata about the image (e.g. image orientation). If metadata is not specified, the default
+ * metadata values are used.
+ */
+@property(nonatomic, nullable) FIRVisionImageMetadata *metadata;
+
+/**
+ * Initializes a VisionImage object with the given image.
+ *
+ * @param image Image to use in vision detection. The given image should be rotated so that its
+ *      `imageOrientation` property is set to `UIImageOrientationUp`. The `UIImage` must have a
+ *      non-NULL `CGImage` property.
+ * @return A VisionImage instance with the given image.
+ */
+- (instancetype)initWithImage:(UIImage *)image NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Initializes a VisionImage object with the given image buffer. To improve performance, it is
+ * recommended to minimize the lifespan and number of instances of this class when initializing with
+ * a `CMSampleBufferRef`.
+ *
+ * @param sampleBuffer Image buffer to use in vision detection. The buffer must be based on
+ *       a pixel buffer (not compressed data), and the pixel format must be one of:
+ *         - `kCVPixelFormatType_32BGRA`
+ *         - `kCVPixelFormatType_420YpCbCr8BiPlanarFullRange`
+ *         - `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`
+ *       In practice: this works with the video output of the phone's camera, but not other
+ *       arbitrary sources of `CMSampleBufferRef`s.
+ * @return A VisionImage instance with the given image buffer.
+ */
+- (instancetype)initWithBuffer:(CMSampleBufferRef)sampleBuffer NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
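
A short sketch of both initializers; `photo` (a `UIImage`) and `sampleBuffer` (a camera frame) are
hypothetical inputs:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    // From a UIImage whose orientation is already `.up`:
    FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:photo];

    // From a camera sample buffer, attaching orientation metadata:
    FIRVisionImage *frame = [[FIRVisionImage alloc] initWithBuffer:sampleBuffer];
    FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
    metadata.orientation = FIRVisionDetectorImageOrientationRightTop;  // e.g. portrait, back camera
    frame.metadata = metadata;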

+ 39 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageLabel.h

@@ -0,0 +1,39 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Represents a label for an image.
+ */
+NS_SWIFT_NAME(VisionImageLabel)
+@interface FIRVisionImageLabel : NSObject
+
+/**
+ * The human readable label text in American English. For example: "Balloon".
+ *
+ * @discussion This string is not fit for display purposes, as it is not localized. Use the
+ *     `entityID` and query the Knowledge Graph to get a localized description of the label text.
+ */
+@property(nonatomic, copy, readonly) NSString *text;
+
+/**
+ * Confidence for the label in range [0, 1]. The value is a `floatValue`.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *confidence;
+
+/**
+ * Opaque entity ID used to query the Knowledge Graph to get a localized description of the label
+ * text. Some IDs may be available in [Google Knowledge Graph Search API]
+ * (https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, readonly, nullable) NSString *entityID;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 55 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageLabeler.h

@@ -0,0 +1,55 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionImageLabel;
+@class FIRVisionImage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A block containing an array of labels or `nil` if there's an error.
+ *
+ * @param labels Array of labels detected in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionImageLabelerCallback)(NSArray<FIRVisionImageLabel *> *_Nullable labels,
+                                              NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionImageLabelerCallback);
+
+/**
+ * @enum VisionImageLabelerType
+ * An enum of image labeler types.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionImageLabelerType) {
+  /** On-device image labeler type. */
+  FIRVisionImageLabelerTypeOnDevice,
+  /** Cloud image labeler type. */
+  FIRVisionImageLabelerTypeCloud,
+} NS_SWIFT_NAME(VisionImageLabelerType);
+
+/**
+ * An on-device or cloud image labeler for labeling images.
+ */
+NS_SWIFT_NAME(VisionImageLabeler)
+@interface FIRVisionImageLabeler : NSObject
+
+/** The image labeler type. */
+@property(nonatomic, readonly) FIRVisionImageLabelerType type;
+
+/**
+ * Unavailable. Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Processes the given image for on-device or cloud image labeling.
+ *
+ * @param image The image to process.
+ * @param completion Handler to call back on the main queue with labels or error.
+ */
+- (void)processImage:(FIRVisionImage *)image
+          completion:(FIRVisionImageLabelerCallback)completion
+    NS_SWIFT_NAME(process(_:completion:));
+
+@end
+
+NS_ASSUME_NONNULL_END
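
A minimal labeling sketch, assuming the `FIRVision` factory method `-onDeviceImageLabeler` declared
in FIRVision.h; `visionImage` is a hypothetical `FIRVisionImage`:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionImageLabeler *labeler = [[FIRVision vision] onDeviceImageLabeler];
    [labeler processImage:visionImage
               completion:^(NSArray<FIRVisionImageLabel *> *_Nullable labels,
                            NSError *_Nullable error) {
      if (labels == nil) {
        NSLog(@"Labeling failed: %@", error);
        return;
      }
      for (FIRVisionImageLabel *label in labels) {
        NSLog(@"%@ (confidence %@)", label.text, label.confidence);
      }
    }];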

+ 59 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionImageMetadata.h

@@ -0,0 +1,59 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionDetectorImageOrientation
+ * This enum specifies where the origin (0,0) of the image is located. The constants have the same
+ * values as defined by the EXIF specification.
+ */
+typedef NS_ENUM(NSUInteger, FIRVisionDetectorImageOrientation) {
+  /**
+   * Orientation code indicating the 0th row is the top and the 0th column is the left side.
+   */
+  FIRVisionDetectorImageOrientationTopLeft = 1,
+  /**
+   * Orientation code indicating the 0th row is the top and the 0th column is the right side.
+   */
+  FIRVisionDetectorImageOrientationTopRight,
+  /**
+   * Orientation code indicating the 0th row is the bottom and the 0th column is the right side.
+   */
+  FIRVisionDetectorImageOrientationBottomRight,
+  /**
+   * Orientation code indicating the 0th row is the bottom and the 0th column is the left side.
+   */
+  FIRVisionDetectorImageOrientationBottomLeft,
+  /**
+   * Orientation code indicating the 0th row is the left side and the 0th column is the top.
+   */
+  FIRVisionDetectorImageOrientationLeftTop,
+  /**
+   * Orientation code indicating the 0th row is the right side and the 0th column is the top.
+   */
+  FIRVisionDetectorImageOrientationRightTop,
+  /**
+   * Orientation code indicating the 0th row is the right side and the 0th column is the bottom.
+   */
+  FIRVisionDetectorImageOrientationRightBottom,
+  /**
+   * Orientation code indicating the 0th row is the left side and the 0th column is the
+   * bottom.
+   */
+  FIRVisionDetectorImageOrientationLeftBottom,
+} NS_SWIFT_NAME(VisionDetectorImageOrientation);
+
+/**
+ * Metadata of an image used in feature detection.
+ */
+NS_SWIFT_NAME(VisionImageMetadata)
+@interface FIRVisionImageMetadata : NSObject
+
+/**
+ * The display orientation of the image. Defaults to `.topLeft`.
+ */
+@property(nonatomic) FIRVisionDetectorImageOrientation orientation;
+
+@end
+
+NS_ASSUME_NONNULL_END
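
A sketch of one commonly used mapping from device orientation to detector orientation for frames
from the back camera; the exact mapping depends on the capture setup, so treat these cases as an
assumption to verify:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionDetectorImageOrientation orientation;
    switch ([[UIDevice currentDevice] orientation]) {
      case UIDeviceOrientationPortrait:
        orientation = FIRVisionDetectorImageOrientationRightTop;
        break;
      case UIDeviceOrientationLandscapeLeft:
        orientation = FIRVisionDetectorImageOrientationTopLeft;
        break;
      default:
        orientation = FIRVisionDetectorImageOrientationRightTop;  // fall back to portrait
        break;
    }

    FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
    metadata.orientation = orientation;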

+ 41 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionLatitudeLongitude.h

@@ -0,0 +1,41 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * An object representing a latitude/longitude pair.  This is expressed as a pair of doubles
+ * representing degrees latitude and degrees longitude.  Unless specified otherwise, this must
+ * conform to the <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
+ * standard</a>.  Values must be within normalized ranges.
+ */
+NS_SWIFT_NAME(VisionLatitudeLongitude)
+@interface FIRVisionLatitudeLongitude : NSObject
+
+/**
+ * The latitude in degrees. It must be in the range [-90.0, +90.0]. The value is double.
+ */
+@property(nonatomic, nullable) NSNumber *latitude;
+
+/**
+ * The longitude in degrees. It must be in the range [-180.0, +180.0]. The value is double.
+ */
+@property(nonatomic, nullable) NSNumber *longitude;
+
+/**
+ * Initializes a VisionLatitudeLongitude with the given latitude and longitude.
+ *
+ * @param latitude Latitude of the location.  The value is double.
+ * @param longitude Longitude of the location.  The value is double.
+ * @return A VisionLatitudeLongitude instance with the given latitude and longitude.
+ */
+- (instancetype)initWithLatitude:(nullable NSNumber *)latitude
+                       longitude:(nullable NSNumber *)longitude NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 25 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionOnDeviceImageLabelerOptions.h

@@ -0,0 +1,25 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** Options for an on-device image labeler. */
+NS_SWIFT_NAME(VisionOnDeviceImageLabelerOptions)
+@interface FIRVisionOnDeviceImageLabelerOptions : NSObject
+
+/**
+ * The confidence threshold for labels returned by the image labeler. Labels returned by the image
+ * labeler will have a confidence level greater than or equal to the given threshold. Values must
+ * be in the range [0, 1]. If unset or an invalid value is set, the default threshold of 0.5 is
+ * used. There is no limit on the maximum number of labels returned by an on-device image labeler.
+ */
+@property(nonatomic) float confidenceThreshold;
+
+/**
+ * Designated initializer that creates a new instance of on-device image labeler options with the
+ * default values.
+ */
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
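
A minimal sketch of raising the threshold, assuming the `FIRVision` factory method
`-onDeviceImageLabelerWithOptions:` declared in FIRVision.h:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionOnDeviceImageLabelerOptions *options =
        [[FIRVisionOnDeviceImageLabelerOptions alloc] init];
    options.confidenceThreshold = 0.7f;  // drop labels below 70% confidence

    FIRVisionImageLabeler *labeler = [[FIRVision vision] onDeviceImageLabelerWithOptions:options];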

+ 35 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionPoint.h

@@ -0,0 +1,35 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A 2D or 3D point in the image.  A valid point must have both x and y coordinates.  The point's
+ * coordinates are in the same scale as the original image.
+ */
+NS_SWIFT_NAME(VisionPoint)
+@interface FIRVisionPoint : NSObject
+
+/**
+ * X coordinate.  The value is float.
+ */
+@property(nonatomic, readonly) NSNumber *x;
+
+/**
+ * Y coordinate.  The value is float.
+ */
+@property(nonatomic, readonly) NSNumber *y;
+
+/**
+ * Z coordinate (or depth).  The value is float.  Z is nil if it is a 2D point.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *z;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
+

+ 30 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionText.h

@@ -0,0 +1,30 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionTextBlock;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Recognized text in an image.
+ */
+NS_SWIFT_NAME(VisionText)
+@interface FIRVisionText : NSObject
+
+/**
+ * String representation of the recognized text.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of blocks recognized in the text.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextBlock *> *blocks;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 57 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextBlock.h

@@ -0,0 +1,57 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionTextLine;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A text block recognized in an image that consists of an array of text lines.
+ */
+NS_SWIFT_NAME(VisionTextBlock)
+@interface FIRVisionTextBlock : NSObject
+
+/**
+ * String representation of the text block that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of text lines that make up the block.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextLine *> *lines;
+
+/**
+ * The rectangle that contains the text block relative to the image in the default coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * An array of recognized languages in the text block. On-device text recognizers only detect
+ * Latin-based languages, while cloud text recognizers can detect multiple languages. If no
+ * languages are recognized, the array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The four corner points of the text block in clockwise order starting with the top left point
+ * relative to the image in the default coordinate space. The `NSValue` objects are `CGPoint`s. For
+ * cloud text recognizers, the array is `nil`.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * The confidence of the recognized text block. The value is `nil` for all text recognizers except
+ * for cloud text recognizers with model type `VisionCloudTextModelType.dense`.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *confidence;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 51 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextElement.h

@@ -0,0 +1,51 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A text element recognized in an image. A text element is roughly equivalent to a space-separated
+ * word in most Latin-script languages.
+ */
+NS_SWIFT_NAME(VisionTextElement)
+@interface FIRVisionTextElement : NSObject
+
+/**
+ * String representation of the text element that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * The rectangle that contains the text element relative to the image in the default coordinate
+ * space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * An array of recognized languages in the text element. (Cloud API only.)
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The four corner points of the text element in clockwise order starting with the top left point
+ * relative to the image in the default coordinate space. The `NSValue` objects are `CGPoint`s. For
+ * cloud text recognizers, the array is `nil`.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * The confidence of the recognized text element. The value is `nil` for all text recognizers except
+ * for cloud text recognizers with model type `VisionCloudTextModelType.dense`.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *confidence;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 57 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextLine.h

@@ -0,0 +1,57 @@
+#import <CoreGraphics/CoreGraphics.h>
+#import <Foundation/Foundation.h>
+
+@class FIRVisionTextElement;
+@class FIRVisionTextRecognizedLanguage;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * A text line recognized in an image that consists of an array of elements.
+ */
+NS_SWIFT_NAME(VisionTextLine)
+@interface FIRVisionTextLine : NSObject
+
+/**
+ * String representation of the text line that was recognized.
+ */
+@property(nonatomic, readonly) NSString *text;
+
+/**
+ * An array of text elements that make up the line.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextElement *> *elements;
+
+/**
+ * The rectangle that contains the text line relative to the image in the default coordinate space.
+ */
+@property(nonatomic, readonly) CGRect frame;
+
+/**
+ * An array of recognized languages in the text line. On-device text recognizers only detect
+ * Latin-based languages, while cloud text recognizers can detect multiple languages. If no
+ * languages are recognized, the array is empty.
+ */
+@property(nonatomic, readonly) NSArray<FIRVisionTextRecognizedLanguage *> *recognizedLanguages;
+
+/**
+ * The four corner points of the text line in clockwise order starting with the top left point
+ * relative to the image in the default coordinate space. The `NSValue` objects are `CGPoint`s. For
+ * cloud text recognizers, the array is `nil`.
+ */
+@property(nonatomic, readonly, nullable) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * The confidence of the recognized text line. The value is `nil` for all text recognizers except
+ * for cloud text recognizers with model type `VisionCloudTextModelType.dense`.
+ */
+@property(nonatomic, readonly, nullable) NSNumber *confidence;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 60 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizedBreak.h

@@ -0,0 +1,60 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @enum VisionTextRecognizedBreakType
+ * An enum of recognized text break types.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionTextRecognizedBreakType) {
+  /**
+   * Unknown break type.
+   */
+  FIRVisionTextRecognizedBreakTypeUnknown,
+  /**
+   * Line-wrapping break type.
+   */
+  FIRVisionTextRecognizedBreakTypeLineWrap,
+  /**
+   * Hyphen break type.
+   */
+  FIRVisionTextRecognizedBreakTypeHyphen,
+  /**
+   * Line break that ends a paragraph.
+   */
+  FIRVisionTextRecognizedBreakTypeLineBreak,
+  /**
+   * Space break type.
+   */
+  FIRVisionTextRecognizedBreakTypeSpace,
+  /**
+   * Sure space break type.
+   */
+  FIRVisionTextRecognizedBreakTypeSureSpace,
+} NS_SWIFT_NAME(VisionTextRecognizedBreakType);
+
+/**
+ * Detected break from text recognition.
+ */
+NS_SWIFT_NAME(VisionTextRecognizedBreak)
+@interface FIRVisionTextRecognizedBreak : NSObject
+
+/**
+ *  The recognized text break type.
+ */
+@property(nonatomic, readonly) FIRVisionTextRecognizedBreakType type;
+
+/**
+ * Indicates whether the break precedes the text element. If `NO`, the break comes after the text
+ * element.
+ */
+@property(nonatomic, readonly) BOOL isPrefix;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 24 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizedLanguage.h

@@ -0,0 +1,24 @@
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * Detected language from text recognition.
+ */
+NS_SWIFT_NAME(VisionTextRecognizedLanguage)
+@interface FIRVisionTextRecognizedLanguage : NSObject
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, readonly, nullable) NSString *languageCode;
+
+/**
+ * Unavailable.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 61 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FIRVisionTextRecognizer.h

@@ -0,0 +1,61 @@
+#import <Foundation/Foundation.h>
+
+@class FIRVisionImage;
+@class FIRVisionText;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * The callback to invoke when the text recognition completes.
+ *
+ * @param text Recognized text in the image or `nil` if there was an error.
+ * @param error The error or `nil`.
+ */
+typedef void (^FIRVisionTextRecognitionCallback)(FIRVisionText *_Nullable text,
+                                                 NSError *_Nullable error)
+    NS_SWIFT_NAME(VisionTextRecognitionCallback);
+
+/**
+ * @enum VisionTextRecognizerType
+ * An enum of text recognizer types.
+ */
+typedef NS_ENUM(NSInteger, FIRVisionTextRecognizerType) {
+  /**
+   * On-Device recognizer type.
+   */
+  FIRVisionTextRecognizerTypeOnDevice,
+  /**
+   * Cloud recognizer type.
+   */
+  FIRVisionTextRecognizerTypeCloud,
+} NS_SWIFT_NAME(VisionTextRecognizerType);
+
+/**
+ * An on-device or cloud text recognizer that recognizes text in an image.
+ */
+NS_SWIFT_NAME(VisionTextRecognizer)
+@interface FIRVisionTextRecognizer : NSObject
+
+/**
+ * The text recognizer type.
+ */
+@property(nonatomic, readonly) FIRVisionTextRecognizerType type;
+
+/**
+ * Unavailable. Use `Vision` factory methods.
+ */
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ * Processes the given image for on-device or cloud text recognition.
+ *
+ * @param image The image to process for recognizing text.
+ * @param completion Handler to call back on the main queue when text recognition completes.
+ */
+- (void)processImage:(FIRVisionImage *)image
+          completion:(FIRVisionTextRecognitionCallback)completion
+    NS_SWIFT_NAME(process(_:completion:));
+
+@end
+
+NS_ASSUME_NONNULL_END
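
A usage sketch walking the block/line/element hierarchy defined in the headers above, assuming the
`FIRVision` factory method `-onDeviceTextRecognizer` declared in FIRVision.h; `visionImage` is a
hypothetical `FIRVisionImage`:

    // #import <FirebaseMLVision/FirebaseMLVision.h>
    FIRVisionTextRecognizer *recognizer = [[FIRVision vision] onDeviceTextRecognizer];
    [recognizer processImage:visionImage
                  completion:^(FIRVisionText *_Nullable result, NSError *_Nullable error) {
      if (result == nil) {
        NSLog(@"Text recognition failed: %@", error);
        return;
      }
      for (FIRVisionTextBlock *block in result.blocks) {
        for (FIRVisionTextLine *line in block.lines) {
          for (FIRVisionTextElement *element in line.elements) {
            NSLog(@"element: %@ frame: %@", element.text, NSStringFromCGRect(element.frame));
          }
        }
      }
    }];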

+ 35 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Headers/FirebaseMLVision.h

@@ -0,0 +1,35 @@
+#import "FIRVision.h"
+#import "FIRVisionBarcode.h"
+#import "FIRVisionBarcodeDetector.h"
+#import "FIRVisionBarcodeDetectorOptions.h"
+#import "FIRVisionCloudDetectorOptions.h"
+#import "FIRVisionCloudDocumentTextRecognizerOptions.h"
+#import "FIRVisionCloudImageLabelerOptions.h"
+#import "FIRVisionCloudLandmark.h"
+#import "FIRVisionCloudLandmarkDetector.h"
+#import "FIRVisionCloudTextRecognizerOptions.h"
+#import "FIRVisionDocumentText.h"
+#import "FIRVisionDocumentTextBlock.h"
+#import "FIRVisionDocumentTextParagraph.h"
+#import "FIRVisionDocumentTextRecognizer.h"
+#import "FIRVisionDocumentTextSymbol.h"
+#import "FIRVisionDocumentTextWord.h"
+#import "FIRVisionFace.h"
+#import "FIRVisionFaceContour.h"
+#import "FIRVisionFaceDetector.h"
+#import "FIRVisionFaceDetectorOptions.h"
+#import "FIRVisionFaceLandmark.h"
+#import "FIRVisionImage.h"
+#import "FIRVisionImageLabel.h"
+#import "FIRVisionImageLabeler.h"
+#import "FIRVisionImageMetadata.h"
+#import "FIRVisionLatitudeLongitude.h"
+#import "FIRVisionOnDeviceImageLabelerOptions.h"
+#import "FIRVisionPoint.h"
+#import "FIRVisionText.h"
+#import "FIRVisionTextBlock.h"
+#import "FIRVisionTextElement.h"
+#import "FIRVisionTextLine.h"
+#import "FIRVisionTextRecognizedBreak.h"
+#import "FIRVisionTextRecognizedLanguage.h"
+#import "FIRVisionTextRecognizer.h"

+ 18 - 0
Libraries external/Firebase/MLVision/FirebaseMLVision.framework/Modules/module.modulemap

@@ -0,0 +1,18 @@
+framework module FirebaseMLVision {
+  umbrella header "FirebaseMLVision.h"
+  export *
+  module * { export * }
+  link "sqlite3"
+  link "z"
+  link framework "AVFoundation"
+  link framework "Accelerate"
+  link framework "CoreGraphics"
+  link framework "CoreMedia"
+  link framework "CoreVideo"
+  link framework "Foundation"
+  link framework "LocalAuthentication"
+  link framework "QuartzCore"
+  link framework "Security"
+  link framework "SystemConfiguration"
+  link framework "UIKit"
+}

BIN
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/GTMSessionFetcher


+ 52 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMGatherInputStream.h

@@ -0,0 +1,52 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The GTMGatherInput stream is an input stream implementation that is to be
+// instantiated with an NSArray of NSData objects.  It works in the traditional
+// scatter/gather vector I/O model.  Rather than allocating a big NSData object
+// to hold all of the data and performing a copy into that object, the
+// GTMGatherInputStream will maintain a reference to the NSArray and read from
+// each NSData in turn as the read method is called.  You should not alter the
+// underlying set of NSData objects until all read operations on this input
+// stream have completed.
+
+#import <Foundation/Foundation.h>
+
+#ifndef GTM_NONNULL
+  #if defined(__has_attribute)
+    #if __has_attribute(nonnull)
+      #define GTM_NONNULL(x) __attribute__((nonnull x))
+    #else
+      #define GTM_NONNULL(x)
+    #endif
+  #else
+    #define GTM_NONNULL(x)
+  #endif
+#endif
+
+// Avoid multiple declaration of this class.
+//
+// Note: This should match the declaration of GTMGatherInputStream in GTMMIMEDocument.m
+
+#ifndef GTM_GATHERINPUTSTREAM_DECLARED
+#define GTM_GATHERINPUTSTREAM_DECLARED
+
+@interface GTMGatherInputStream : NSInputStream <NSStreamDelegate>
+
++ (NSInputStream *)streamWithArray:(NSArray *)dataArray GTM_NONNULL((1));
+
+@end
+
+#endif  // GTM_GATHERINPUTSTREAM_DECLARED
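
A minimal sketch of the gather pattern described above:

    // #import <GTMSessionFetcher/GTMGatherInputStream.h>
    NSArray *chunks = @[ [@"Hello, " dataUsingEncoding:NSUTF8StringEncoding],
                         [@"world" dataUsingEncoding:NSUTF8StringEncoding] ];
    // Reads "Hello, world" without copying the chunks into one buffer.
    NSInputStream *stream = [GTMGatherInputStream streamWithArray:chunks];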

+ 148 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMMIMEDocument.h

@@ -0,0 +1,148 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This is a simple class to create or parse a MIME document.
+
+// To create a MIME document, allocate a new GTMMIMEDocument and start adding parts.
+// When you are done adding parts, call generateInputStream or generateDispatchData.
+//
+// A good reference for MIME is http://en.wikipedia.org/wiki/MIME
+
+#import <Foundation/Foundation.h>
+
+#ifndef GTM_NONNULL
+  #if defined(__has_attribute)
+    #if __has_attribute(nonnull)
+      #define GTM_NONNULL(x) __attribute__((nonnull x))
+    #else
+      #define GTM_NONNULL(x)
+    #endif
+  #else
+    #define GTM_NONNULL(x)
+  #endif
+#endif
+
+#ifndef GTM_DECLARE_GENERICS
+  #if __has_feature(objc_generics)
+    #define GTM_DECLARE_GENERICS 1
+  #else
+    #define GTM_DECLARE_GENERICS 0
+  #endif
+#endif
+
+#ifndef GTM_NSArrayOf
+  #if GTM_DECLARE_GENERICS
+    #define GTM_NSArrayOf(value) NSArray<value>
+    #define GTM_NSDictionaryOf(key, value) NSDictionary<key, value>
+  #else
+    #define GTM_NSArrayOf(value) NSArray
+    #define GTM_NSDictionaryOf(key, value) NSDictionary
+  #endif // GTM_DECLARE_GENERICS
+#endif  // GTM_NSArrayOf
+
+
+// GTMMIMEDocumentPart represents a part of a MIME document.
+//
+// +[GTMMIMEDocument MIMEPartsWithBoundary:data:] returns an array of these.
+@interface GTMMIMEDocumentPart : NSObject
+
+@property(nonatomic, readonly) GTM_NSDictionaryOf(NSString *, NSString *) *headers;
+@property(nonatomic, readonly) NSData *headerData;
+@property(nonatomic, readonly) NSData *body;
+@property(nonatomic, readonly) NSUInteger length;
+
++ (instancetype)partWithHeaders:(NSDictionary *)headers body:(NSData *)body;
+
+@end
+
+@interface GTMMIMEDocument : NSObject
+
+// Get or set the unique boundary for the parts that have been added.
+//
+// When creating a MIME document from parts, this is typically calculated
+// automatically after all parts have been added.
+@property(nonatomic, copy) NSString *boundary;
+
+#pragma mark - Methods for Creating a MIME Document
+
++ (instancetype)MIMEDocument;
+
+// Adds a new part to this mime document with the given headers and body.
+// The headers keys and values should be NSStrings.
+// Adding a part may cause the boundary string to change.
+- (void)addPartWithHeaders:(GTM_NSDictionaryOf(NSString *, NSString *) *)headers
+                      body:(NSData *)body GTM_NONNULL((1,2));
+
+// An input stream that can be used to efficiently read the contents of the MIME document.
+//
+// Any parameter may be null if the result is not wanted.
+- (void)generateInputStream:(NSInputStream **)outStream
+                     length:(unsigned long long *)outLength
+                   boundary:(NSString **)outBoundary;
+
+// A dispatch_data_t with the contents of the MIME document.
+//
+// Note: dispatch_data_t is one-way toll-free bridged so the result
+// may be cast directly to NSData *.
+//
+// Any parameter may be null if the result is not wanted.
+- (void)generateDispatchData:(dispatch_data_t *)outDispatchData
+                      length:(unsigned long long *)outLength
+                    boundary:(NSString **)outBoundary;
+
+// Utility method for making a header section, including trailing newlines.
++ (NSData *)dataWithHeaders:(GTM_NSDictionaryOf(NSString *, NSString *) *)headers;
+
+#pragma mark - Methods for Parsing a MIME Document
+
+// Method for parsing out an array of MIME parts from a MIME document.
+//
+// Returns an array of GTMMIMEDocumentParts.  Returns nil if no part can
+// be found.
++ (GTM_NSArrayOf(GTMMIMEDocumentPart *) *)MIMEPartsWithBoundary:(NSString *)boundary
+                                                           data:(NSData *)fullDocumentData;
+
+// Utility method for efficiently searching possibly discontiguous NSData
+// for occurrences of target byte. This method does not "flatten" an NSData
+// that is composed of discontiguous blocks.
+//
+// The byte offsets of non-overlapping occurrences of the target are returned as
+// NSNumbers in the array.
++ (void)searchData:(NSData *)data
+       targetBytes:(const void *)targetBytes
+      targetLength:(NSUInteger)targetLength
+      foundOffsets:(GTM_NSArrayOf(NSNumber *) **)outFoundOffsets;
+
+// Utility method to parse header bytes into an NSDictionary.
++ (GTM_NSDictionaryOf(NSString *, NSString *) *)headersWithData:(NSData *)data;
+
+// ------ UNIT TESTING ONLY BELOW ------
+
+// Internal methods, exposed for unit testing only.
+- (void)seedRandomWith:(u_int32_t)seed;
+
++ (NSUInteger)findBytesWithNeedle:(const unsigned char *)needle
+                     needleLength:(NSUInteger)needleLength
+                         haystack:(const unsigned char *)haystack
+                   haystackLength:(NSUInteger)haystackLength
+                      foundOffset:(NSUInteger *)foundOffset;
+
++ (void)searchData:(NSData *)data
+       targetBytes:(const void *)targetBytes
+      targetLength:(NSUInteger)targetLength
+      foundOffsets:(GTM_NSArrayOf(NSNumber *) **)outFoundOffsets
+ foundBlockNumbers:(GTM_NSArrayOf(NSNumber *) **)outFoundBlockNumbers;
+
+@end
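
A short sketch of building a two-part document and streaming it, using only the methods declared
above:

    // #import <GTMSessionFetcher/GTMMIMEDocument.h>
    GTMMIMEDocument *doc = [GTMMIMEDocument MIMEDocument];
    [doc addPartWithHeaders:@{ @"Content-Type" : @"text/plain" }
                       body:[@"part one" dataUsingEncoding:NSUTF8StringEncoding]];
    [doc addPartWithHeaders:@{ @"Content-Type" : @"application/json" }
                       body:[@"{\"k\":1}" dataUsingEncoding:NSUTF8StringEncoding]];

    NSInputStream *stream = nil;
    unsigned long long length = 0;
    NSString *boundary = nil;
    [doc generateInputStream:&stream length:&length boundary:&boundary];
    // `stream` yields the multipart body; `boundary` belongs in the request's Content-Type header.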

+ 49 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMReadMonitorInputStream.h

@@ -0,0 +1,49 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#ifndef GTM_NONNULL
+  #if defined(__has_attribute)
+    #if __has_attribute(nonnull)
+      #define GTM_NONNULL(x) __attribute__((nonnull x))
+    #else
+      #define GTM_NONNULL(x)
+    #endif
+  #else
+    #define GTM_NONNULL(x)
+  #endif
+#endif
+
+
+@interface GTMReadMonitorInputStream : NSInputStream <NSStreamDelegate>
+
++ (instancetype)inputStreamWithStream:(NSInputStream *)input GTM_NONNULL((1));
+
+- (instancetype)initWithStream:(NSInputStream *)input GTM_NONNULL((1));
+
+// The read monitor selector is called when bytes have been read. It should have this signature:
+//
+// - (void)inputStream:(GTMReadMonitorInputStream *)stream
+//      readIntoBuffer:(uint8_t *)buffer
+//              length:(int64_t)length;
+
+@property(atomic, weak) id readDelegate;
+@property(atomic, assign) SEL readSelector;
+
+// Modes for invoking callbacks, when necessary.
+@property(atomic, strong) NSArray *runLoopModes;
+
+@end
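
A minimal monitoring sketch; `bodyStream` is a hypothetical source stream, and the delegate class
implements the selector with the signature documented above:

    // #import <GTMSessionFetcher/GTMReadMonitorInputStream.h>
    GTMReadMonitorInputStream *monitored =
        [GTMReadMonitorInputStream inputStreamWithStream:bodyStream];
    monitored.readDelegate = self;
    monitored.readSelector = @selector(inputStream:readIntoBuffer:length:);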

+ 1305 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcher.h

@@ -0,0 +1,1305 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// GTMSessionFetcher is a wrapper around NSURLSession for http operations.
+//
+// What does this offer on top of NSURLSession?
+//
+// - Block-style callbacks for useful functionality like progress rather
+//   than delegate methods.
+// - Out-of-process uploads and downloads using NSURLSession, including
+//   management of fetches after relaunch.
+// - Integration with GTMAppAuth for invisible management and refresh of
+//   authorization tokens.
+// - Pretty-printed http logging.
+// - Cookies handling that does not interfere with or get interfered with
+//   by WebKit cookies or on Mac by Safari and other apps.
+// - Credentials handling for the http operation.
+// - Rate-limiting and cookie grouping when fetchers are created with
+//   GTMSessionFetcherService.
+//
+// If the bodyData or bodyFileURL property is set, then a POST request is assumed.
+//
+// Each fetcher is assumed to be for a one-shot fetch request; don't reuse the object
+// for a second fetch.
+//
+// The fetcher will be self-retained as long as a connection is pending.
+//
+// To keep user activity private, URLs must have an https scheme (unless the property
+// allowedInsecureSchemes is set to permit the scheme.)
+//
+// Callbacks will be released when the fetch completes or is stopped, so there is no need
+// to use weak self references in the callback blocks.
+//
+// Sample usage:
+//
+//  _fetcherService = [[GTMSessionFetcherService alloc] init];
+//
+//  GTMSessionFetcher *myFetcher = [_fetcherService fetcherWithURLString:myURLString];
+//  myFetcher.retryEnabled = YES;
+//  myFetcher.comment = @"First profile image";
+//
+//  // Optionally specify a file URL or NSData for the request body to upload.
+//  myFetcher.bodyData = [postString dataUsingEncoding:NSUTF8StringEncoding];
+//
+//  [myFetcher beginFetchWithCompletionHandler:^(NSData *data, NSError *error) {
+//    if (error != nil) {
+//      // Server status code or network error.
+//      //
+//      // If the domain is kGTMSessionFetcherStatusDomain then the error code
+//      // is a failure status from the server.
+//    } else {
+//      // Fetch succeeded.
+//    }
+//  }];
+//
+// There is also a beginFetch call that takes a pointer and selector for the completion handler;
+// a pointer and selector are a better style when the callback is a substantial, separate method.
+//
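+// For example, a sketch using the documented finishedSEL signature:
+//
+//  [myFetcher beginFetchWithDelegate:self
+//                  didFinishSelector:@selector(fetcher:finishedWithData:error:)];
+//
+//  - (void)fetcher:(GTMSessionFetcher *)fetcher
+//   finishedWithData:(NSData *)data
+//              error:(NSError *)error {
+//    // Handle data or error here.
+//  }
+//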
+// NOTE:  Fetches may retrieve data from the server even though the server
+//        returned an error, so the criterion for success is a nil error.
+//        The completion handler is called when the server status is >= 300 with an NSError
+//        having domain kGTMSessionFetcherStatusDomain and code set to the server status.
+//
+//        Status codes are at <http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html>
+//
+//
+// Background session support:
+//
+// Out-of-process uploads and downloads may be created by setting the fetcher's
+// useBackgroundSession property. Data to be uploaded should be provided via
+// the uploadFileURL property; the download destination should be specified with
+// the destinationFileURL.  NOTE: Background upload files should be in a location
+// that will be valid even after the device is restarted, so the file should not
+// be uploaded from a system temporary or cache directory.
+//
+// Background session transfers are slower, and should typically be used only
+// for very large downloads or uploads (hundreds of megabytes).
+//
+// When background sessions are used in iOS apps, the application delegate must
+// pass through the parameters from UIApplicationDelegate's
+// application:handleEventsForBackgroundURLSession:completionHandler: to the
+// fetcher class.
+//
+// When the application has been relaunched, it may also create a new fetcher
+// instance to handle completion of the transfers.
+//
+//  - (void)application:(UIApplication *)application
+//      handleEventsForBackgroundURLSession:(NSString *)identifier
+//                        completionHandler:(void (^)())completionHandler {
+//    // Application was re-launched on completing an out-of-process download.
+//
+//    // Pass the URLSession info related to this re-launch to the fetcher class.
+//    [GTMSessionFetcher application:application
+//        handleEventsForBackgroundURLSession:identifier
+//                          completionHandler:completionHandler];
+//
+//    // Get a fetcher related to this re-launch and re-hook up a completionHandler to it.
+//    GTMSessionFetcher *fetcher = [GTMSessionFetcher fetcherWithSessionIdentifier:identifier];
+//    NSURL *destinationFileURL = fetcher.destinationFileURL;
+//    fetcher.completionHandler = ^(NSData *data, NSError *error) {
+//      [self downloadCompletedToFile:destinationFileURL error:error];
+//    };
+//  }
+//
+//
+// Threading and queue support:
+//
+// Networking always happens on a background thread; there is no advantage to
+// changing thread or queue to create or start a fetcher.
+//
+// Callbacks are run on the main thread; alternatively, the app may set the
+// fetcher's callbackQueue to a dispatch queue.
+//
+// Once the fetcher's beginFetch method has been called, the fetcher's methods and
+// properties may be accessed from any thread.
+//
+// Downloading to disk:
+//
+// To have downloaded data saved directly to disk, specify a file URL for the
+// destinationFileURL property.
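+//
+// For example (a sketch; the destination path is illustrative):
+//  myFetcher.destinationFileURL =
+//      [NSURL fileURLWithPath:[NSTemporaryDirectory()
+//                                 stringByAppendingPathComponent:@"profile.png"]];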
+//
+// HTTP methods and headers:
+//
+// Alternative HTTP methods, like PUT, and custom headers can be specified by
+// creating the fetcher with an appropriate NSMutableURLRequest.
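+//
+// For example (a sketch; url is the request URL):
+//  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
+//  request.HTTPMethod = @"PUT";
+//  [request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
+//  GTMSessionFetcher *myFetcher = [GTMSessionFetcher fetcherWithRequest:request];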
+//
+//
+// Caching:
+//
+// The fetcher avoids caching. That is best for API requests, but may hurt
+// repeat fetches of static data. Apps may enable a persistent disk cache by
+// customizing the config:
+//
+//  fetcher.configurationBlock = ^(GTMSessionFetcher *configFetcher,
+//                                 NSURLSessionConfiguration *config) {
+//    config.URLCache = [NSURLCache sharedURLCache];
+//  };
+//
+// Or use the standard system config to share cookie storage with web views
+// and to enable disk caching:
+//
+//  fetcher.configuration = [NSURLSessionConfiguration defaultSessionConfiguration];
+//
+//
+// Cookies:
+//
+// There are three supported mechanisms for remembering cookies between fetches.
+//
+// By default, a standalone GTMSessionFetcher uses a mutable array held
+// statically to track cookies for all instantiated fetchers.  This avoids
+// cookies being set by servers for the application from interfering with
+// Safari and WebKit cookie settings, and vice versa.
+// The fetcher cookies are lost when the application quits.
+//
+// To rely instead on WebKit's global NSHTTPCookieStorage, set the fetcher's
+// cookieStorage property:
+//   myFetcher.cookieStorage = [NSHTTPCookieStorage sharedHTTPCookieStorage];
+//
+// To share cookies with other apps, use the method introduced in iOS 9/OS X 10.11:
+//   myFetcher.cookieStorage =
+//     [NSHTTPCookieStorage sharedCookieStorageForGroupContainerIdentifier:kMyCompanyContainedID];
+//
+// To ignore existing cookies and only have cookies related to the single fetch
+// be applied, make a temporary cookie storage object:
+//   myFetcher.cookieStorage = [[GTMSessionCookieStorage alloc] init];
+//
+// Note: cookies set while following redirects will be sent to the server, as
+// the redirects are followed by the fetcher.
+//
+// To completely disable cookies, similar to setting cookieStorageMethod to
+// kGTMHTTPFetcherCookieStorageMethodNone, adjust the session configuration
+// appropriately in the fetcher or fetcher service:
+//  fetcher.configurationBlock = ^(GTMSessionFetcher *configFetcher,
+//                                 NSURLSessionConfiguration *config) {
+//    config.HTTPCookieAcceptPolicy = NSHTTPCookieAcceptPolicyNever;
+//    config.HTTPShouldSetCookies = NO;
+//  };
+//
+// If the fetcher is created from a GTMSessionFetcherService object
+// then the cookie storage mechanism is set to use the cookie storage in the
+// service object rather than the static storage. Disabling cookies in the
+// session configuration set on a service object will disable cookies for all
+// fetchers created from that GTMSessionFetcherService object, since the session
+// configuration is propagated to the fetcher.
+//
+//
+// Monitoring data transfers:
+//
+// The fetcher supports a variety of callback block properties for
+// monitoring transfer progress:
+//  GTMSessionFetcherSendProgressBlock sendProgressBlock
+//  GTMSessionFetcherReceivedProgressBlock receivedProgressBlock
+//  GTMSessionFetcherDownloadProgressBlock downloadProgressBlock
+//
+// If supplied by the server, the anticipated total download size is available
+// as [[myFetcher response] expectedContentLength] (and may be -1 for unknown
+// download sizes.)
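+//
+// For example (a sketch):
+//  myFetcher.downloadProgressBlock = ^(int64_t bytesWritten,
+//                                      int64_t totalBytesWritten,
+//                                      int64_t totalBytesExpectedToWrite) {
+//    // Update a progress indicator here.
+//  };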
+//
+//
+// Automatic retrying of fetches:
+//
+// The fetcher can optionally create a timer and reattempt certain kinds of
+// fetch failures (status codes 408, request timeout; 502, gateway failure;
+// 503, service unavailable; 504, gateway timeout; networking errors
+// NSURLErrorTimedOut and NSURLErrorNetworkConnectionLost.)  The user may
+// set a retry selector to customize the type of errors which will be retried.
+//
+// Retries are done in an exponential-backoff fashion (that is, after 1 second,
+// 2, 4, 8, and so on.)
+//
+// Enabling automatic retries looks like this:
+//  myFetcher.retryEnabled = YES;
+//
+// With retries enabled, the completion callbacks are called only
+// when no more retries will be attempted. Calling the fetcher's stopFetching
+// method will terminate the retry timer, without the finished or failure
+// selectors being invoked.
+//
+// Optionally, the client may set the maximum retry interval:
+//  myFetcher.maxRetryInterval = 60.0; // in seconds; default is 60 seconds
+//                                     // for downloads, 600 for uploads
+//
+// Servers should never send a 400 or 500 status for errors that are retryable
+// by clients, as those values indicate permanent failures. In nearly all
+// cases, the default standard retry behavior is correct for clients, and no
+// custom client retry behavior is needed or appropriate. Servers that send
+// non-retryable status codes and expect the client to retry the request are
+// faulty.
+//
+// Still, the client may provide a block to determine if a status code or other
+// error should be retried. The block returns YES to set the retry timer or NO
+// to fail without additional fetch attempts.
+//
+// The retry method may return the |suggestedWillRetry| argument to get the
+// default retry behavior.  Server status codes are present in the
+// error argument, and have the domain kGTMSessionFetcherStatusDomain. The
+// user's method may look something like this:
+//
+//  myFetcher.retryBlock = ^(BOOL suggestedWillRetry, NSError *error,
+//                           GTMSessionFetcherRetryResponse response) {
+//    // Perhaps examine error.domain and error.code, or fetcher.retryCount
+//    //
+//    // Respond with YES to start the retry timer, NO to proceed to the failure
+//    // callback, or suggestedWillRetry to get default behavior for the
+//    // current error domain and code values.
+//    response(suggestedWillRetry);
+//  };
+
+
+#import <Foundation/Foundation.h>
+
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#endif
+#if TARGET_OS_WATCH
+#import <WatchKit/WatchKit.h>
+#endif
+
+// By default, fetch logging is stripped from non-DEBUG builds. Developers can
+// override this in their project settings.
+#ifndef STRIP_GTM_FETCH_LOGGING
+  #if !DEBUG
+    #define STRIP_GTM_FETCH_LOGGING 1
+  #else
+    #define STRIP_GTM_FETCH_LOGGING 0
+  #endif
+#endif
+
+// Logs in debug builds.
+#ifndef GTMSESSION_LOG_DEBUG
+  #if DEBUG
+    #define GTMSESSION_LOG_DEBUG(...) NSLog(__VA_ARGS__)
+  #else
+    #define GTMSESSION_LOG_DEBUG(...) do { } while (0)
+  #endif
+#endif
+
+// Asserts in debug builds (or logs in debug builds if GTMSESSION_ASSERT_AS_LOG
+// or NS_BLOCK_ASSERTIONS are defined.)
+#ifndef GTMSESSION_ASSERT_DEBUG
+  #if DEBUG && defined(NS_BLOCK_ASSERTIONS) && !GTMSESSION_ASSERT_AS_LOG
+    #undef GTMSESSION_ASSERT_AS_LOG
+    #define GTMSESSION_ASSERT_AS_LOG 1
+  #endif
+
+  #if DEBUG && !GTMSESSION_ASSERT_AS_LOG
+    #define GTMSESSION_ASSERT_DEBUG(...) NSAssert(__VA_ARGS__)
+  #elif DEBUG
+    #define GTMSESSION_ASSERT_DEBUG(pred, ...) if (!(pred)) { NSLog(__VA_ARGS__); }
+  #else
+    #define GTMSESSION_ASSERT_DEBUG(pred, ...) do { } while (0)
+  #endif
+#endif
+
+// Asserts in debug builds, logs in release builds (or logs in debug builds if
+// GTMSESSION_ASSERT_AS_LOG is defined.)
+#ifndef GTMSESSION_ASSERT_DEBUG_OR_LOG
+  #if DEBUG && !GTMSESSION_ASSERT_AS_LOG
+    #define GTMSESSION_ASSERT_DEBUG_OR_LOG(...) NSAssert(__VA_ARGS__)
+  #else
+    #define GTMSESSION_ASSERT_DEBUG_OR_LOG(pred, ...) if (!(pred)) { NSLog(__VA_ARGS__); }
+  #endif
+#endif
+
+// Macro useful for examining messages from NSURLSession during debugging.
+#if 0
+#define GTM_LOG_SESSION_DELEGATE(...) GTMSESSION_LOG_DEBUG(__VA_ARGS__)
+#else
+#define GTM_LOG_SESSION_DELEGATE(...)
+#endif
+
+#ifndef GTM_NULLABLE
+  #if __has_feature(nullability)  // Available starting in Xcode 6.3
+    #define GTM_NULLABLE_TYPE __nullable
+    #define GTM_NONNULL_TYPE __nonnull
+    #define GTM_NULLABLE nullable
+    #define GTM_NONNULL_DECL nonnull  // GTM_NONNULL is used by GTMDefines.h
+    #define GTM_NULL_RESETTABLE null_resettable
+
+    #define GTM_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
+    #define GTM_ASSUME_NONNULL_END NS_ASSUME_NONNULL_END
+  #else
+    #define GTM_NULLABLE_TYPE
+    #define GTM_NONNULL_TYPE
+    #define GTM_NULLABLE
+    #define GTM_NONNULL_DECL
+    #define GTM_NULL_RESETTABLE
+    #define GTM_ASSUME_NONNULL_BEGIN
+    #define GTM_ASSUME_NONNULL_END
+  #endif  // __has_feature(nullability)
+#endif  // GTM_NULLABLE
+
+#if (TARGET_OS_TV \
+     || TARGET_OS_WATCH \
+     || (!TARGET_OS_IPHONE && defined(MAC_OS_X_VERSION_10_12) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_12) \
+     || (TARGET_OS_IPHONE && defined(__IPHONE_10_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_10_0))
+#define GTMSESSION_DEPRECATE_ON_2016_SDKS(_MSG) __attribute__((deprecated("" _MSG)))
+#else
+#define GTMSESSION_DEPRECATE_ON_2016_SDKS(_MSG)
+#endif
+
+#ifndef GTM_DECLARE_GENERICS
+  #if __has_feature(objc_generics)
+    #define GTM_DECLARE_GENERICS 1
+  #else
+    #define GTM_DECLARE_GENERICS 0
+  #endif
+#endif
+
+#ifndef GTM_NSArrayOf
+  #if GTM_DECLARE_GENERICS
+    #define GTM_NSArrayOf(value) NSArray<value>
+    #define GTM_NSDictionaryOf(key, value) NSDictionary<key, value>
+  #else
+    #define GTM_NSArrayOf(value) NSArray
+    #define GTM_NSDictionaryOf(key, value) NSDictionary
+  #endif // __has_feature(objc_generics)
+#endif  // GTM_NSArrayOf
+
+// For iOS, the fetcher can declare itself a background task to allow fetches
+// to finish when the app leaves the foreground.
+//
+// (This is unrelated to providing a background configuration, which allows
+// out-of-process uploads and downloads.)
+//
+// To disallow use of background tasks during fetches, the target should define
+// GTM_BACKGROUND_TASK_FETCHING to 0, or alternatively may set the
+// skipBackgroundTask property to YES.
+#if TARGET_OS_IPHONE && !TARGET_OS_WATCH && !defined(GTM_BACKGROUND_TASK_FETCHING)
+  #define GTM_BACKGROUND_TASK_FETCHING 1
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#if (TARGET_OS_TV \
+     || TARGET_OS_WATCH \
+     || (!TARGET_OS_IPHONE && defined(MAC_OS_X_VERSION_10_11) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_11) \
+     || (TARGET_OS_IPHONE && defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0))
+  #ifndef GTM_USE_SESSION_FETCHER
+    #define GTM_USE_SESSION_FETCHER 1
+  #endif
+#endif
+
+#if !defined(GTMBridgeFetcher)
+  // These bridge macros should be identical in GTMHTTPFetcher.h and GTMSessionFetcher.h
+  #if GTM_USE_SESSION_FETCHER
+    // Macros mapping to the new fetcher class.
+    #define GTMBridgeFetcher GTMSessionFetcher
+    #define GTMBridgeFetcherService GTMSessionFetcherService
+    #define GTMBridgeFetcherServiceProtocol GTMSessionFetcherServiceProtocol
+    #define GTMBridgeAssertValidSelector GTMSessionFetcherAssertValidSelector
+    #define GTMBridgeCookieStorage GTMSessionCookieStorage
+    #define GTMBridgeCleanedUserAgentString GTMFetcherCleanedUserAgentString
+    #define GTMBridgeSystemVersionString GTMFetcherSystemVersionString
+    #define GTMBridgeApplicationIdentifier GTMFetcherApplicationIdentifier
+    #define kGTMBridgeFetcherStatusDomain kGTMSessionFetcherStatusDomain
+    #define kGTMBridgeFetcherStatusBadRequest GTMSessionFetcherStatusBadRequest
+  #else
+    // Macros mapping to the old fetcher class.
+    #define GTMBridgeFetcher GTMHTTPFetcher
+    #define GTMBridgeFetcherService GTMHTTPFetcherService
+    #define GTMBridgeFetcherServiceProtocol GTMHTTPFetcherServiceProtocol
+    #define GTMBridgeAssertValidSelector GTMAssertSelectorNilOrImplementedWithArgs
+    #define GTMBridgeCookieStorage GTMCookieStorage
+    #define GTMBridgeCleanedUserAgentString GTMCleanedUserAgentString
+    #define GTMBridgeSystemVersionString GTMSystemVersionString
+    #define GTMBridgeApplicationIdentifier GTMApplicationIdentifier
+    #define kGTMBridgeFetcherStatusDomain kGTMHTTPFetcherStatusDomain
+    #define kGTMBridgeFetcherStatusBadRequest kGTMHTTPFetcherStatusBadRequest
+  #endif  // GTM_USE_SESSION_FETCHER
+#endif
+
+GTM_ASSUME_NONNULL_BEGIN
+
+// Notifications
+//
+// Fetch started and stopped, and fetch retry delay started and stopped.
+extern NSString *const kGTMSessionFetcherStartedNotification;
+extern NSString *const kGTMSessionFetcherStoppedNotification;
+extern NSString *const kGTMSessionFetcherRetryDelayStartedNotification;
+extern NSString *const kGTMSessionFetcherRetryDelayStoppedNotification;
+
+// Completion handler notification. This is intended for use by code capturing
+// and replaying fetch requests and results for testing. For fetches where
+// destinationFileURL or accumulateDataBlock is set for the fetcher, the data
+// will be nil for successful fetches.
+//
+// This notification is posted on the main thread.
+extern NSString *const kGTMSessionFetcherCompletionInvokedNotification;
+extern NSString *const kGTMSessionFetcherCompletionDataKey;
+extern NSString *const kGTMSessionFetcherCompletionErrorKey;
+
+// Constants for NSErrors created by the fetcher (excluding server status errors,
+// and error objects originating in the OS.)
+extern NSString *const kGTMSessionFetcherErrorDomain;
+
+// The fetcher turns server error status values (3XX, 4XX, 5XX) into NSErrors
+// with domain kGTMSessionFetcherStatusDomain.
+//
+// Any server response body data accompanying the status error is added to the
+// userInfo dictionary with key kGTMSessionFetcherStatusDataKey.
+extern NSString *const kGTMSessionFetcherStatusDomain;
+extern NSString *const kGTMSessionFetcherStatusDataKey;
+extern NSString *const kGTMSessionFetcherStatusDataContentTypeKey;
+
+// When a fetch fails with an error, these keys are included in the error userInfo
+// dictionary if retries were attempted.
+extern NSString *const kGTMSessionFetcherNumberOfRetriesDoneKey;
+extern NSString *const kGTMSessionFetcherElapsedIntervalWithRetriesKey;
+
+// Background session support requires access to NSUserDefaults.
+// If [NSUserDefaults standardUserDefaults] doesn't yield the correct NSUserDefaults for your
+// usage, e.g. for an App Extension, then implement this class method to return the correct
+// NSUserDefaults.
+// https://developer.apple.com/library/ios/documentation/General/Conceptual/ExtensibilityPG/ExtensionScenarios.html#//apple_ref/doc/uid/TP40014214-CH21-SW6
+@interface GTMSessionFetcherUserDefaultsFactory : NSObject
+
++ (NSUserDefaults *)fetcherUserDefaults;
+
+@end
+
+#ifdef __cplusplus
+}
+#endif
+
+typedef NS_ENUM(NSInteger, GTMSessionFetcherError) {
+  GTMSessionFetcherErrorDownloadFailed = -1,
+  GTMSessionFetcherErrorUploadChunkUnavailable = -2,
+  GTMSessionFetcherErrorBackgroundExpiration = -3,
+  GTMSessionFetcherErrorBackgroundFetchFailed = -4,
+  GTMSessionFetcherErrorInsecureRequest = -5,
+  GTMSessionFetcherErrorTaskCreationFailed = -6,
+};
+
+typedef NS_ENUM(NSInteger, GTMSessionFetcherStatus) {
+  // Standard http status codes.
+  GTMSessionFetcherStatusNotModified = 304,
+  GTMSessionFetcherStatusBadRequest = 400,
+  GTMSessionFetcherStatusUnauthorized = 401,
+  GTMSessionFetcherStatusForbidden = 403,
+  GTMSessionFetcherStatusPreconditionFailed = 412
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+@class GTMSessionCookieStorage;
+@class GTMSessionFetcher;
+
+// The configuration block is for modifying the NSURLSessionConfiguration only.
+// DO NOT change any fetcher properties in the configuration block.
+typedef void (^GTMSessionFetcherConfigurationBlock)(GTMSessionFetcher *fetcher,
+                                                    NSURLSessionConfiguration *configuration);
+typedef void (^GTMSessionFetcherSystemCompletionHandler)(void);
+typedef void (^GTMSessionFetcherCompletionHandler)(NSData * GTM_NULLABLE_TYPE data,
+                                                   NSError * GTM_NULLABLE_TYPE error);
+typedef void (^GTMSessionFetcherBodyStreamProviderResponse)(NSInputStream *bodyStream);
+typedef void (^GTMSessionFetcherBodyStreamProvider)(GTMSessionFetcherBodyStreamProviderResponse response);
+typedef void (^GTMSessionFetcherDidReceiveResponseDispositionBlock)(NSURLSessionResponseDisposition disposition);
+typedef void (^GTMSessionFetcherDidReceiveResponseBlock)(NSURLResponse *response,
+                                                         GTMSessionFetcherDidReceiveResponseDispositionBlock dispositionBlock);
+typedef void (^GTMSessionFetcherChallengeDispositionBlock)(NSURLSessionAuthChallengeDisposition disposition,
+                                                           NSURLCredential * GTM_NULLABLE_TYPE credential);
+typedef void (^GTMSessionFetcherChallengeBlock)(GTMSessionFetcher *fetcher,
+                                                NSURLAuthenticationChallenge *challenge,
+                                                GTMSessionFetcherChallengeDispositionBlock dispositionBlock);
+typedef void (^GTMSessionFetcherWillRedirectResponse)(NSURLRequest * GTM_NULLABLE_TYPE redirectedRequest);
+typedef void (^GTMSessionFetcherWillRedirectBlock)(NSHTTPURLResponse *redirectResponse,
+                                                   NSURLRequest *redirectRequest,
+                                                   GTMSessionFetcherWillRedirectResponse response);
+typedef void (^GTMSessionFetcherAccumulateDataBlock)(NSData * GTM_NULLABLE_TYPE buffer);
+typedef void (^GTMSessionFetcherSimulateByteTransferBlock)(NSData * GTM_NULLABLE_TYPE buffer,
+                                                           int64_t bytesWritten,
+                                                           int64_t totalBytesWritten,
+                                                           int64_t totalBytesExpectedToWrite);
+typedef void (^GTMSessionFetcherReceivedProgressBlock)(int64_t bytesWritten,
+                                                       int64_t totalBytesWritten);
+typedef void (^GTMSessionFetcherDownloadProgressBlock)(int64_t bytesWritten,
+                                                       int64_t totalBytesWritten,
+                                                       int64_t totalBytesExpectedToWrite);
+typedef void (^GTMSessionFetcherSendProgressBlock)(int64_t bytesSent,
+                                                   int64_t totalBytesSent,
+                                                   int64_t totalBytesExpectedToSend);
+typedef void (^GTMSessionFetcherWillCacheURLResponseResponse)(NSCachedURLResponse * GTM_NULLABLE_TYPE cachedResponse);
+typedef void (^GTMSessionFetcherWillCacheURLResponseBlock)(NSCachedURLResponse *proposedResponse,
+                                                           GTMSessionFetcherWillCacheURLResponseResponse responseBlock);
+typedef void (^GTMSessionFetcherRetryResponse)(BOOL shouldRetry);
+typedef void (^GTMSessionFetcherRetryBlock)(BOOL suggestedWillRetry,
+                                            NSError * GTM_NULLABLE_TYPE error,
+                                            GTMSessionFetcherRetryResponse response);
+
+typedef void (^GTMSessionFetcherTestResponse)(NSHTTPURLResponse * GTM_NULLABLE_TYPE response,
+                                              NSData * GTM_NULLABLE_TYPE data,
+                                              NSError * GTM_NULLABLE_TYPE error);
+typedef void (^GTMSessionFetcherTestBlock)(GTMSessionFetcher *fetcherToTest,
+                                           GTMSessionFetcherTestResponse testResponse);
+
+void GTMSessionFetcherAssertValidSelector(id GTM_NULLABLE_TYPE obj, SEL GTM_NULLABLE_TYPE sel, ...);
+
+// Utility functions for applications self-identifying to servers via a
+// user-agent header
+
+// The "standard" user agent includes the application identifier, taken from the bundle,
+// followed by a space and the system version string. Pass nil to use +mainBundle as the source
+// of the bundle identifier.
+//
+// Applications may use this as a starting point for their own user agent strings, perhaps
+// with additional sections appended.  Use GTMFetcherCleanedUserAgentString() below to
+// clean up any string being added to the user agent.
+NSString *GTMFetcherStandardUserAgentString(NSBundle * GTM_NULLABLE_TYPE bundle);
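+//
+// For example (a sketch; request is an NSMutableURLRequest owned by the app):
+//   NSString *userAgent = GTMFetcherStandardUserAgentString(nil);
+//   [request setValue:userAgent forHTTPHeaderField:@"User-Agent"];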
+
+// Make a generic name and version for the current application, like
+// com.example.MyApp/1.2.3 relying on the bundle identifier and the
+// CFBundleShortVersionString or CFBundleVersion.
+//
+// The bundle ID may be overridden as the base identifier string by
+// adding to the bundle's Info.plist a "GTMUserAgentID" key.
+//
+// If no bundle ID or override is available, the process name preceded
+// by "proc_" is used.
+NSString *GTMFetcherApplicationIdentifier(NSBundle * GTM_NULLABLE_TYPE bundle);
+
+// Make an identifier like "MacOSX/10.7.1" or "iPod_Touch/4.1 hw/iPod1_1"
+NSString *GTMFetcherSystemVersionString(void);
+
+// Make a parseable user-agent identifier from the given string, replacing whitespace
+// and commas with underscores, and removing other characters that may interfere
+// with parsing of the full user-agent string.
+//
+// For example, @"[My App]" would become @"My_App"
+NSString *GTMFetcherCleanedUserAgentString(NSString *str);
+
+// Grab the data from an input stream. Since streams cannot be assumed to be rewindable,
+// this may be destructive; the caller can try to rewind the stream (by setting the
+// NSStreamFileCurrentOffsetKey property) or can just use the NSData to make a new
+// NSInputStream. This function is intended to facilitate testing rather than be used in
+// production.
+//
+// This function operates synchronously on the current thread. Depending on how the
+// input stream is implemented, it may be appropriate to dispatch to a different
+// queue before calling this function.
+//
+// Failure is indicated by a returned data value of nil.
+NSData * GTM_NULLABLE_TYPE GTMDataFromInputStream(NSInputStream *inputStream, NSError **outError);
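+//
+// For example (a test-only sketch; bodyStream is the stream under test):
+//   NSError *streamError = nil;
+//   NSData *streamData = GTMDataFromInputStream(bodyStream, &streamError);
+//   if (streamData == nil) {
+//     // Handle streamError.
+//   }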
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+
+#if !GTM_USE_SESSION_FETCHER
+@protocol GTMHTTPFetcherServiceProtocol;
+#endif
+
+// This protocol allows abstract references to the fetcher service, primarily for
+// fetchers (which may be compiled without the fetcher service class present.)
+//
+// Apps should not need to use this protocol.
+@protocol GTMSessionFetcherServiceProtocol <NSObject>
+// This protocol allows us to call into the service without requiring
+// GTMSessionFetcherService sources in this project
+
+@property(atomic, strong) dispatch_queue_t callbackQueue;
+
+- (BOOL)fetcherShouldBeginFetching:(GTMSessionFetcher *)fetcher;
+- (void)fetcherDidCreateSession:(GTMSessionFetcher *)fetcher;
+- (void)fetcherDidBeginFetching:(GTMSessionFetcher *)fetcher;
+- (void)fetcherDidStop:(GTMSessionFetcher *)fetcher;
+
+- (GTMSessionFetcher *)fetcherWithRequest:(NSURLRequest *)request;
+- (BOOL)isDelayingFetcher:(GTMSessionFetcher *)fetcher;
+
+@property(atomic, assign) BOOL reuseSession;
+- (GTM_NULLABLE NSURLSession *)session;
+- (GTM_NULLABLE NSURLSession *)sessionForFetcherCreation;
+- (GTM_NULLABLE id<NSURLSessionDelegate>)sessionDelegate;
+- (GTM_NULLABLE NSDate *)stoppedAllFetchersDate;
+
+// Methods for compatibility with the old GTMHTTPFetcher.
+@property(readonly, strong, GTM_NULLABLE) NSOperationQueue *delegateQueue;
+
+@end  // @protocol GTMSessionFetcherServiceProtocol
+
+#ifndef GTM_FETCHER_AUTHORIZATION_PROTOCOL
+#define GTM_FETCHER_AUTHORIZATION_PROTOCOL 1
+@protocol GTMFetcherAuthorizationProtocol <NSObject>
+@required
+// This protocol allows us to call the authorizer without requiring its sources
+// in this project.
+- (void)authorizeRequest:(GTM_NULLABLE NSMutableURLRequest *)request
+                delegate:(id)delegate
+       didFinishSelector:(SEL)sel;
+
+- (void)stopAuthorization;
+
+- (void)stopAuthorizationForRequest:(NSURLRequest *)request;
+
+- (BOOL)isAuthorizingRequest:(NSURLRequest *)request;
+
+- (BOOL)isAuthorizedRequest:(NSURLRequest *)request;
+
+@property(strong, readonly, GTM_NULLABLE) NSString *userEmail;
+
+@optional
+
+// Indicate if authorization may be attempted. Even if this succeeds,
+// authorization may fail if the user's permissions have been revoked.
+@property(readonly) BOOL canAuthorize;
+
+// For development only, allow authorization of non-SSL requests, allowing
+// transmission of the bearer token unencrypted.
+@property(assign) BOOL shouldAuthorizeAllRequests;
+
+- (void)authorizeRequest:(GTM_NULLABLE NSMutableURLRequest *)request
+       completionHandler:(void (^)(NSError * GTM_NULLABLE_TYPE error))handler;
+
+#if GTM_USE_SESSION_FETCHER
+@property (weak, GTM_NULLABLE) id<GTMSessionFetcherServiceProtocol> fetcherService;
+#else
+@property (weak, GTM_NULLABLE) id<GTMHTTPFetcherServiceProtocol> fetcherService;
+#endif
+
+- (BOOL)primeForRefresh;
+
+@end
+#endif  // GTM_FETCHER_AUTHORIZATION_PROTOCOL
+
+#if GTM_BACKGROUND_TASK_FETCHING
+// A protocol for an alternative target for messages from GTMSessionFetcher to UIApplication.
+// Set the target using +[GTMSessionFetcher setSubstituteUIApplication:]
+@protocol GTMUIApplicationProtocol <NSObject>
+- (UIBackgroundTaskIdentifier)beginBackgroundTaskWithName:(nullable NSString *)taskName
+                                        expirationHandler:(void(^ __nullable)(void))handler;
+- (void)endBackgroundTask:(UIBackgroundTaskIdentifier)identifier;
+@end
+#endif
+
+#pragma mark -
+
+// GTMSessionFetcher objects are used for async retrieval of an HTTP GET or POST.
+//
+// See additional comments at the beginning of this file.
+@interface GTMSessionFetcher : NSObject <NSURLSessionDelegate>
+
+// Create a fetcher
+//
+// fetcherWithRequest will return an autoreleased fetcher, but if
+// the connection is successfully created, the connection should retain the
+// fetcher for the life of the connection as well. So the caller doesn't have
+// to retain the fetcher explicitly unless they want to be able to cancel it.
++ (instancetype)fetcherWithRequest:(GTM_NULLABLE NSURLRequest *)request;
+
+// Convenience methods that make a request, like +fetcherWithRequest
++ (instancetype)fetcherWithURL:(NSURL *)requestURL;
++ (instancetype)fetcherWithURLString:(NSString *)requestURLString;
+
+// Methods for creating fetchers to continue previous fetches.
++ (instancetype)fetcherWithDownloadResumeData:(NSData *)resumeData;
++ (GTM_NULLABLE instancetype)fetcherWithSessionIdentifier:(NSString *)sessionIdentifier;
+
+// Returns an array of currently active fetchers for background sessions,
+// both restarted and newly created ones.
++ (GTM_NSArrayOf(GTMSessionFetcher *) *)fetchersForBackgroundSessions;
+
+// Designated initializer.
+//
+// Applications should create fetchers with a "fetcherWith..." method on a fetcher
+// service or a class method, not with this initializer.
+//
+// The configuration should typically be nil. Applications needing to customize
+// the configuration may do so by setting the configurationBlock property.
+- (instancetype)initWithRequest:(GTM_NULLABLE NSURLRequest *)request
+                  configuration:(GTM_NULLABLE NSURLSessionConfiguration *)configuration;
+
+// The fetcher's request.  This may not be set after beginFetch has been invoked. The request
+// may change due to redirects.
+@property(strong, GTM_NULLABLE) NSURLRequest *request;
+
+// Set a header field value on the request. Header field value changes will not
+// affect a fetch after the fetch has begun.
+- (void)setRequestValue:(GTM_NULLABLE NSString *)value forHTTPHeaderField:(NSString *)field;
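+//
+// For example (a sketch):
+//   [myFetcher setRequestValue:@"application/json" forHTTPHeaderField:@"Accept"];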
+
+// Data used for resuming a download task.
+@property(atomic, readonly, GTM_NULLABLE) NSData *downloadResumeData;
+
+// The configuration; this must be set before the fetch begins. If no configuration is
+// set or inherited from the fetcher service, then the fetcher uses an ephemeral config.
+//
+// NOTE: This property should typically be nil. Applications needing to customize
+// the configuration should do so by setting the configurationBlock property.
+// That allows the fetcher to pick an appropriate base configuration, with the
+// application setting only the configuration properties it needs to customize.
+@property(atomic, strong, GTM_NULLABLE) NSURLSessionConfiguration *configuration;
+
+// A block the client may use to customize the configuration used to create the session.
+//
+// This is called synchronously, either on the thread that begins the fetch or, during a retry,
+// on the main thread. The configuration block may be called repeatedly if multiple fetchers are
+// created.
+//
+// The configuration block is for modifying the NSURLSessionConfiguration only.
+// DO NOT change any fetcher properties in the configuration block. Fetcher properties
+// may be set in the fetcher service prior to fetcher creation, or on the fetcher prior
+// to invoking beginFetch.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherConfigurationBlock configurationBlock;
+
+// A session is created as needed by the fetcher.  A fetcher service object
+// may maintain sessions for multiple fetches to the same host.
+@property(atomic, strong, GTM_NULLABLE) NSURLSession *session;
+
+// The task in flight.
+@property(atomic, readonly, GTM_NULLABLE) NSURLSessionTask *sessionTask;
+
+// The background session identifier.
+@property(atomic, readonly, GTM_NULLABLE) NSString *sessionIdentifier;
+
+// Indicates a fetcher created to finish a background session task.
+@property(atomic, readonly) BOOL wasCreatedFromBackgroundSession;
+
+// Additional user-supplied data to encode into the session identifier. Since session identifier
+// length limits are unspecified, this should be kept small. Key names beginning with an underscore
+// are reserved for use by the fetcher.
+@property(atomic, strong, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, NSString *) *sessionUserInfo;
+
+// The human-readable description to be assigned to the task.
+@property(atomic, copy, GTM_NULLABLE) NSString *taskDescription;
+
+// The priority assigned to the task, if any.  Use NSURLSessionTaskPriorityLow,
+// NSURLSessionTaskPriorityDefault, or NSURLSessionTaskPriorityHigh.
+@property(atomic, assign) float taskPriority;
+
+// The fetcher encodes information used to resume a session in the session identifier.
+// This method, intended for internal use, returns the encoded information.  The sessionUserInfo
+// dictionary is stored as identifier metadata.
+- (GTM_NULLABLE GTM_NSDictionaryOf(NSString *, NSString *) *)sessionIdentifierMetadata;
+
+#if TARGET_OS_IPHONE && !TARGET_OS_WATCH
+// The app should pass to this method the completion handler passed in the app delegate method
+// application:handleEventsForBackgroundURLSession:completionHandler:
++ (void)application:(UIApplication *)application
+    handleEventsForBackgroundURLSession:(NSString *)identifier
+                      completionHandler:(GTMSessionFetcherSystemCompletionHandler)completionHandler;
+#endif
+
+// Indicate that a newly created session should be a background session.
+// A new session identifier will be created by the fetcher.
+//
+// Warning:  The only thing background sessions are for is rare download
+// of huge, batched files of data. And even just for those, there's a lot
+// of pain and hackery needed to get transfers to actually happen reliably
+// with background sessions.
+//
+// Don't try to upload or download in many background sessions, since the system
+// will impose an exponentially increasing time penalty to prevent the app from
+// getting too much background execution time.
+//
+// References:
+//
+//   "Moving to Fewer, Larger Transfers"
+//   https://forums.developer.apple.com/thread/14853
+//
+//   "NSURLSession’s Resume Rate Limiter"
+//   https://forums.developer.apple.com/thread/14854
+//
+//   "Background Session Task state persistence"
+//   https://forums.developer.apple.com/thread/11554
+//
+@property(assign) BOOL useBackgroundSession;
+
+// Indicates if the fetcher was started using a background session.
+@property(atomic, readonly, getter=isUsingBackgroundSession) BOOL usingBackgroundSession;
+
+// Indicates if uploads should use an upload task.  This is always set for file or stream-provider
+// bodies, but may be set explicitly for NSData bodies.
+@property(atomic, assign) BOOL useUploadTask;
+
+// Indicates that the fetcher is using a session that may be shared with other fetchers.
+@property(atomic, readonly) BOOL canShareSession;
+
+// By default, the fetcher allows only secure (https) schemes unless this
+// property is set, or the GTM_ALLOW_INSECURE_REQUESTS build flag is set.
+//
+// For example, during debugging when fetching from a development server that lacks SSL support,
+// this may be set to @[ @"http" ], or when the fetcher is used to retrieve local files,
+// this may be set to @[ @"file" ].
+//
+// This should be left as nil for release builds to avoid creating the opportunity for
+// leaking private user behavior and data.  If a server is providing insecure URLs
+// for fetching by the client app, report the problem as a server security & privacy bug.
+//
+// For builds with the iOS 9/OS X 10.11 and later SDKs, this property is required only when
+// the app specifies NSAppTransportSecurity/NSAllowsArbitraryLoads in the main bundle's Info.plist.
+@property(atomic, copy, GTM_NULLABLE) GTM_NSArrayOf(NSString *) *allowedInsecureSchemes;
+
+// By default, the fetcher prohibits localhost requests unless this property is set,
+// or the GTM_ALLOW_INSECURE_REQUESTS build flag is set.
+//
+// For localhost requests, the URL scheme is not checked when this property is set.
+//
+// For builds with the iOS 9/OS X 10.11 and later SDKs, this property is required only when
+// the app specifies NSAppTransportSecurity/NSAllowsArbitraryLoads in the main bundle's Info.plist.
+@property(atomic, assign) BOOL allowLocalhostRequest;
+
+// By default, the fetcher requires valid server certs.  This may be bypassed
+// temporarily for development against a test server with an invalid cert.
+@property(atomic, assign) BOOL allowInvalidServerCertificates;
+
+// Cookie storage object for this fetcher. If nil, the fetcher will use a static cookie
+// storage instance shared among fetchers. If this fetcher was created by a fetcher service
+// object, it will be set to use the service object's cookie storage. See Cookies section above for
+// the full discussion.
+//
+// Because, as of Jan 2014, standalone instances of NSHTTPCookieStorage do not actually
+// store any cookies (Radar 15735276), we use our own subclass, GTMSessionCookieStorage,
+// to hold cookies in memory.
+@property(atomic, strong, GTM_NULLABLE) NSHTTPCookieStorage *cookieStorage;
+
+// Setting the credential is optional; it is used if the connection receives
+// an authentication challenge.
+@property(atomic, strong, GTM_NULLABLE) NSURLCredential *credential;
+
+// Setting the proxy credential is optional; it is used if the connection
+// receives an authentication challenge from a proxy.
+@property(atomic, strong, GTM_NULLABLE) NSURLCredential *proxyCredential;
+
+// If body data, body file URL, or body stream provider is not set, then a GET request
+// method is assumed.
+@property(atomic, strong, GTM_NULLABLE) NSData *bodyData;
+
+// File to use as the request body. This forces use of an upload task.
+@property(atomic, strong, GTM_NULLABLE) NSURL *bodyFileURL;
+
+// Length of body to send, expected or actual.
+@property(atomic, readonly) int64_t bodyLength;
+
+// The body stream provider may be called repeatedly to provide a body.
+// Setting a body stream provider forces use of an upload task.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherBodyStreamProvider bodyStreamProvider;
+
+// Object to add authorization to the request, if needed.
+//
+// This may not be changed once beginFetch has been invoked.
+@property(atomic, strong, GTM_NULLABLE) id<GTMFetcherAuthorizationProtocol> authorizer;
+
+// The service object that created and monitors this fetcher, if any.
+@property(atomic, strong) id<GTMSessionFetcherServiceProtocol> service;
+
+// The host, if any, used to classify this fetcher in the fetcher service.
+@property(atomic, copy, GTM_NULLABLE) NSString *serviceHost;
+
+// The priority, if any, used for starting fetchers in the fetcher service.
+//
+// Lower values are higher priority; the default is 0, and values may
+// be negative or positive. This priority affects only the start order of
+// fetchers that are being delayed by a fetcher service when the running fetchers
+// exceeds the service's maxRunningFetchersPerHost.  A priority of NSIntegerMin will
+// exempt this fetcher from delay.
+@property(atomic, assign) NSInteger servicePriority;
+
+// The delegate's optional didReceiveResponse block may be used to inspect or alter
+// the session task response.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherDidReceiveResponseBlock didReceiveResponseBlock;
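+
+// For example, a sketch that accepts every response:
+//   myFetcher.didReceiveResponseBlock = ^(NSURLResponse *response,
+//       GTMSessionFetcherDidReceiveResponseDispositionBlock dispositionBlock) {
+//     dispositionBlock(NSURLSessionResponseAllow);
+//   };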
+
+// The delegate's optional challenge block may be used to inspect or alter
+// the session task challenge.
+//
+// If this block is not set, the fetcher's default behavior for the NSURLSessionTask
+// didReceiveChallenge: delegate method is to use the fetcher's respondToChallenge: method
+// which relies on the fetcher's credential and proxyCredential properties.
+//
+// Warning: This may be called repeatedly if the challenge fails. Check
+// challenge.previousFailureCount to identify repeated invocations.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherChallengeBlock challengeBlock;
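+
+// For example, a sketch that defers to the default challenge handling:
+//   myFetcher.challengeBlock = ^(GTMSessionFetcher *fetcher,
+//                                NSURLAuthenticationChallenge *challenge,
+//                                GTMSessionFetcherChallengeDispositionBlock dispositionBlock) {
+//     dispositionBlock(NSURLSessionAuthChallengePerformDefaultHandling, nil);
+//   };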
+
+// The delegate's optional willRedirect block may be used to inspect or alter
+// the redirection.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherWillRedirectBlock willRedirectBlock;
+
+// The optional send progress block reports body bytes uploaded.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherSendProgressBlock sendProgressBlock;
+
+// The optional accumulate block may be set by clients wishing to accumulate data
+// themselves rather than let the fetcher append each buffer to an NSData.
+//
+// When this is called with nil data (such as on redirect) the client
+// should empty its accumulation buffer.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherAccumulateDataBlock accumulateDataBlock;
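+
+// For example (a sketch; myMutableData is a client-owned NSMutableData buffer):
+//   myFetcher.accumulateDataBlock = ^(NSData *buffer) {
+//     if (buffer) {
+//       [myMutableData appendData:buffer];
+//     } else {
+//       [myMutableData setLength:0];  // Reset the buffer, such as on redirect.
+//     }
+//   };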
+
+// The optional received progress block may be used to monitor data
+// received from a data task.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherReceivedProgressBlock receivedProgressBlock;
+
+// The delegate's optional downloadProgress block may be used to monitor download
+// progress in writing to disk.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherDownloadProgressBlock downloadProgressBlock;
+
+// The delegate's optional willCacheURLResponse block may be used to alter the cached
+// NSURLResponse. The user may prevent caching by passing nil to the block's response.
+//
+// This is called on the callback queue.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherWillCacheURLResponseBlock willCacheURLResponseBlock;
+
+// Enable retrying; see comments at the top of this file.  Setting
+// retryEnabled=YES resets the min and max retry intervals.
+@property(atomic, assign, getter=isRetryEnabled) BOOL retryEnabled;
+
+// Retry block is optional for retries.
+//
+// If present, this block should call the response block with YES to cause a retry or NO to end the
+// fetch.
+// See comments at the top of this file.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherRetryBlock retryBlock;
+
+// Retry intervals must be strictly less than maxRetryInterval, else
+// they will be limited to maxRetryInterval and no further retries will
+// be attempted.  Setting maxRetryInterval to 0.0 will reset it to the
+// default value, 60 seconds for downloads and 600 seconds for uploads.
+@property(atomic, assign) NSTimeInterval maxRetryInterval;
+
+// Starting retry interval.  Setting minRetryInterval to 0.0 will reset it
+// to a random value between 1.0 and 2.0 seconds.  Clients should normally not
+// set this except for unit testing.
+@property(atomic, assign) NSTimeInterval minRetryInterval;
+
+// Multiplier used to increase the interval between retries, typically 2.0.
+// Clients should not need to set this.
+@property(atomic, assign) double retryFactor;
+
+// Number of retries attempted.
+@property(atomic, readonly) NSUInteger retryCount;
+
+// Interval delay to precede next retry.
+@property(atomic, readonly) NSTimeInterval nextRetryInterval;
+
+#if GTM_BACKGROUND_TASK_FETCHING
+// Skip use of a UIBackgroundTask, thus requiring fetches to complete when the app is in the
+// foreground.
+//
+// Targets should define GTM_BACKGROUND_TASK_FETCHING to 0 to avoid use of a UIBackgroundTask
+// on iOS (which otherwise allows fetches to complete in the background).  This property is
+// available when it's not practical to set the preprocessor define.
+@property(atomic, assign) BOOL skipBackgroundTask;
+#endif  // GTM_BACKGROUND_TASK_FETCHING
+
+// Begin fetching the request
+//
+// The delegate may optionally implement the callback or pass nil for the selector or handler.
+//
+// The delegate and all callback blocks are retained between the beginFetch call until after the
+// finish callback, or until the fetch is stopped.
+//
+// An error is passed to the callback for server statuses 300 or
+// higher, with the status stored as the error object's code.
+//
+// finishedSEL has a signature like:
+//   - (void)fetcher:(GTMSessionFetcher *)fetcher
+//  finishedWithData:(NSData *)data
+//             error:(NSError *)error;
+//
+// If the application has specified a destinationFileURL or an accumulateDataBlock
+// for the fetcher, the data parameter passed to the callback will be nil.
+
+- (void)beginFetchWithDelegate:(GTM_NULLABLE id)delegate
+             didFinishSelector:(GTM_NULLABLE SEL)finishedSEL;
+
+- (void)beginFetchWithCompletionHandler:(GTM_NULLABLE GTMSessionFetcherCompletionHandler)handler;
+
+// Returns YES if this fetcher is in the process of fetching a URL.
+@property(atomic, readonly, getter=isFetching) BOOL fetching;
+
+// Cancel the fetch of the request that's currently in progress.  The completion handler
+// will not be called.
+- (void)stopFetching;
+
+// A block to be called when the fetch completes.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherCompletionHandler completionHandler;
+
+// A block to be called if download resume data becomes available.
+@property(atomic, strong, GTM_NULLABLE) void (^resumeDataBlock)(NSData *);
+
+// Return the status code from the server response.
+@property(atomic, readonly) NSInteger statusCode;
+
+// Return the http headers from the response.
+@property(atomic, strong, readonly, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, NSString *) *responseHeaders;
+
+// The response, once it's been received.
+@property(atomic, strong, readonly, GTM_NULLABLE) NSURLResponse *response;
+
+// Bytes downloaded so far.
+@property(atomic, readonly) int64_t downloadedLength;
+
+// Buffer of currently-downloaded data, if available.
+@property(atomic, readonly, strong, GTM_NULLABLE) NSData *downloadedData;
+
+// Local path to which the downloaded file will be moved.
+//
+// If a file already exists at the path, it will be overwritten.
+// Will create the enclosing folders if they are not present.
+@property(atomic, strong, GTM_NULLABLE) NSURL *destinationFileURL;
+
+// The time this fetcher originally began fetching. This is useful as a time
+// barrier for ignoring irrelevant fetch notifications or callbacks.
+@property(atomic, strong, readonly, GTM_NULLABLE) NSDate *initialBeginFetchDate;
+
+// userData is retained solely for the convenience of the client.
+@property(atomic, strong, GTM_NULLABLE) id userData;
+
+// Stored property values are retained solely for the convenience of the client.
+@property(atomic, copy, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, id) *properties;
+
+- (void)setProperty:(GTM_NULLABLE id)obj forKey:(NSString *)key;  // Pass nil for obj to remove the property.
+- (GTM_NULLABLE id)propertyForKey:(NSString *)key;
+
+- (void)addPropertiesFromDictionary:(GTM_NSDictionaryOf(NSString *, id) *)dict;
+
+// Comments are useful for logging, so are strongly recommended for each fetcher.
+@property(atomic, copy, GTM_NULLABLE) NSString *comment;
+
+- (void)setCommentWithFormat:(NSString *)format, ... NS_FORMAT_FUNCTION(1, 2);
+
+// Log of request and response, if logging is enabled
+@property(atomic, copy, GTM_NULLABLE) NSString *log;
+
+// Callbacks are run on this queue.  If none is supplied, the main queue is used.
+@property(atomic, strong, GTM_NULL_RESETTABLE) dispatch_queue_t callbackQueue;
+
+// The queue used internally by the session to invoke its delegate methods in the fetcher.
+//
+// Application callbacks are always called by the fetcher on the callbackQueue above,
+// not on this queue. Apps should generally not change this queue.
+//
+// The default delegate queue is the main queue.
+//
+// This value is ignored after the session has been created, so this
+// property should be set in the fetcher service rather than in the fetcher, as it applies
+// to a shared session.
+@property(atomic, strong, GTM_NULL_RESETTABLE) NSOperationQueue *sessionDelegateQueue;
+
+// Spin the run loop or sleep the thread, discarding events, until the fetch has completed.
+//
+// This is only for use in testing or in tools without a user interface.
+//
+// Note:  Synchronous fetches should never be used by shipping apps; they are
+// sufficient reason for rejection from the app store.
+//
+// Returns NO if timed out.
+- (BOOL)waitForCompletionWithTimeout:(NSTimeInterval)timeoutInSeconds;
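+//
+// For example (a test-only sketch; myHandler is the test's completion block):
+//   [myFetcher beginFetchWithCompletionHandler:myHandler];
+//   BOOL finished = [myFetcher waitForCompletionWithTimeout:30.0];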
+
+// Test block is optional for testing.
+//
+// If present, this block will cause the fetcher to skip starting the session, and instead
+// use the test block response values when calling the completion handler and delegate code.
+//
+// Test code can set this on the fetcher or on the fetcher service.  For testing libraries
+// that use a fetcher without exposing either the fetcher or the fetcher service, the global
+// method setGlobalTestBlock: will set the block for all fetchers that do not have a test
+// block set.
+//
+// The test code can pass nil for all response parameters to indicate that the fetch
+// should proceed.
+//
+// Applications can exclude test block support by setting GTM_DISABLE_FETCHER_TEST_BLOCK.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherTestBlock testBlock;
+
++ (void)setGlobalTestBlock:(GTM_NULLABLE GTMSessionFetcherTestBlock)block;
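+
+// For example, a test-only sketch that simulates a 200 response (assumes the
+// fetcher was created with a request URL):
+//   myFetcher.testBlock = ^(GTMSessionFetcher *fetcherToTest,
+//                           GTMSessionFetcherTestResponse testResponse) {
+//     NSHTTPURLResponse *fakeResponse =
+//         [[NSHTTPURLResponse alloc] initWithURL:fetcherToTest.request.URL
+//                                     statusCode:200
+//                                    HTTPVersion:@"HTTP/1.1"
+//                                   headerFields:nil];
+//     NSData *fakeData = [@"ok" dataUsingEncoding:NSUTF8StringEncoding];
+//     testResponse(fakeResponse, fakeData, nil);
+//   };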
+
+// When using the testBlock, |testBlockAccumulateDataChunkCount| is the desired number of chunks to
+// divide the response data into if the client has streaming enabled. The data will be divided
+// into up to |testBlockAccumulateDataChunkCount| chunks; however, the exact number may vary
+// depending on the size of the response data (e.g. a 1-byte response can only be divided into
+// one chunk).
+@property(atomic, readwrite) NSUInteger testBlockAccumulateDataChunkCount;
+
+#if GTM_BACKGROUND_TASK_FETCHING
+// For testing or to override UIApplication invocations, apps may specify an alternative
+// target for messages to UIApplication.
++ (void)setSubstituteUIApplication:(nullable id<GTMUIApplicationProtocol>)substituteUIApplication;
++ (nullable id<GTMUIApplicationProtocol>)substituteUIApplication;
+#endif  // GTM_BACKGROUND_TASK_FETCHING
+
+// Exposed for testing.
++ (GTMSessionCookieStorage *)staticCookieStorage;
++ (BOOL)appAllowsInsecureRequests;
+
+#if STRIP_GTM_FETCH_LOGGING
+// If logging is stripped, provide a stub for the main method
+// for controlling logging.
++ (void)setLoggingEnabled:(BOOL)flag;
++ (BOOL)isLoggingEnabled;
+
+#else
+
+// These methods let an application log specific body text, such as the text description of a binary
+// request or response. The application should set the fetcher to defer response body logging until
+// the response has been received and the log response body has been set by the app. For example:
+//
+//   fetcher.logRequestBody = [binaryObject stringDescription];
+//   fetcher.deferResponseBodyLogging = YES;
+//   [fetcher beginFetchWithCompletionHandler:^(NSData *data, NSError *error) {
+//      if (error == nil) {
+//        fetcher.logResponseBody = [[[MyThing alloc] initWithData:data] stringDescription];
+//      }
+//      fetcher.deferResponseBodyLogging = NO;
+//   }];
+
+@property(atomic, copy, GTM_NULLABLE) NSString *logRequestBody;
+@property(atomic, assign) BOOL deferResponseBodyLogging;
+@property(atomic, copy, GTM_NULLABLE) NSString *logResponseBody;
+
+// Internal logging support.
+@property(atomic, readonly) NSData *loggedStreamData;
+@property(atomic, assign) BOOL hasLoggedError;
+@property(atomic, strong, GTM_NULLABLE) NSURL *redirectedFromURL;
+- (void)appendLoggedStreamData:(NSData *)dataToAdd;
+- (void)clearLoggedStreamData;
+
+#endif // STRIP_GTM_FETCH_LOGGING
+
+@end
+
+@interface GTMSessionFetcher (BackwardsCompatibilityOnly)
+// Clients using GTMSessionFetcher should set the cookie storage explicitly themselves.
+// This method is just for compatibility with the old GTMHTTPFetcher class.
+- (void)setCookieStorageMethod:(NSInteger)method;
+@end
+
+// Until we can just instantiate NSHTTPCookieStorage for local use, we'll
+// implement all the public methods ourselves.  This stores cookies only in
+// memory.  Additional methods are provided for testing.
+//
+// iOS 9/OS X 10.11 added +[NSHTTPCookieStorage sharedCookieStorageForGroupContainerIdentifier:]
+// which may also be used to create cookie storage.
+@interface GTMSessionCookieStorage : NSHTTPCookieStorage
+
+// Add the array of cookies to the storage, replacing duplicates.
+// Also removes expired cookies from the storage.
+- (void)setCookies:(GTM_NULLABLE GTM_NSArrayOf(NSHTTPCookie *) *)cookies;
+
+- (void)removeAllCookies;
+
+@end
+
+// Macros to monitor synchronization blocks in debug builds.
+// These report problems using GTMSessionCheckDebug.
+//
+// GTMSessionMonitorSynchronized           Start monitoring a top-level-only
+//                                         @sync scope.
+// GTMSessionMonitorRecursiveSynchronized  Start monitoring a top-level or
+//                                         recursive @sync scope.
+// GTMSessionCheckSynchronized             Verify that the current execution
+//                                         is inside a @sync scope.
+// GTMSessionCheckNotSynchronized          Verify that the current execution
+//                                         is not inside a @sync scope.
+//
+// Example usage:
+//
+// - (void)myExternalMethod {
+//   @synchronized(self) {
+//     GTMSessionMonitorSynchronized(self)
+//
+// - (void)myInternalMethod {
+//   GTMSessionCheckSynchronized(self);
+//
+// - (void)callMyCallbacks {
+//   GTMSessionCheckNotSynchronized(self);
+//
+// GTMSessionCheckNotSynchronized is available for verifying the code isn't
+// in a deadlockable @sync state when posting notifications and invoking
+// callbacks. Don't use GTMSessionCheckNotSynchronized immediately before a
+// @sync scope; the normal recursiveness check of GTMSessionMonitorSynchronized
+// can catch those.
+
+#ifdef __OBJC__
+// If asserts are entirely no-ops, the synchronization monitor is just a bunch
+// of counting code that doesn't report exceptional circumstances in any way.
+// Only build the synchronization monitor code if NS_BLOCK_ASSERTIONS is not
+// defined or asserts are being logged instead.
+#if DEBUG && (!defined(NS_BLOCK_ASSERTIONS) || GTMSESSION_ASSERT_AS_LOG)
+  #define __GTMSessionMonitorSynchronizedVariableInner(varname, counter) \
+      varname ## counter
+  #define __GTMSessionMonitorSynchronizedVariable(varname, counter)  \
+      __GTMSessionMonitorSynchronizedVariableInner(varname, counter)
+
+  #define GTMSessionMonitorSynchronized(obj)                                     \
+      NS_VALID_UNTIL_END_OF_SCOPE id                                             \
+        __GTMSessionMonitorSynchronizedVariable(__monitor, __COUNTER__) =        \
+        [[GTMSessionSyncMonitorInternal alloc] initWithSynchronizationObject:obj \
+                                                    allowRecursive:NO            \
+                                                     functionName:__func__]
+
+  #define GTMSessionMonitorRecursiveSynchronized(obj)                            \
+      NS_VALID_UNTIL_END_OF_SCOPE id                                             \
+        __GTMSessionMonitorSynchronizedVariable(__monitor, __COUNTER__) =        \
+        [[GTMSessionSyncMonitorInternal alloc] initWithSynchronizationObject:obj \
+                                                    allowRecursive:YES           \
+                                                     functionName:__func__]
+
+  #define GTMSessionCheckSynchronized(obj) {                                           \
+      GTMSESSION_ASSERT_DEBUG(                                                         \
+          [GTMSessionSyncMonitorInternal functionsHoldingSynchronizationOnObject:obj], \
+          @"GTMSessionCheckSynchronized(" #obj ") failed: not sync'd"                  \
+          @" on " #obj " in %s. Call stack:\n%@",                                      \
+          __func__, [NSThread callStackSymbols]);                                      \
+      }
+
+  #define GTMSessionCheckNotSynchronized(obj) {                                       \
+      GTMSESSION_ASSERT_DEBUG(                                                        \
+        ![GTMSessionSyncMonitorInternal functionsHoldingSynchronizationOnObject:obj], \
+        @"GTMSessionCheckNotSynchronized(" #obj ") failed: was sync'd"                \
+        @" on " #obj " in %s by %@. Call stack:\n%@", __func__,                       \
+        [GTMSessionSyncMonitorInternal functionsHoldingSynchronizationOnObject:obj],  \
+        [NSThread callStackSymbols]);                                                 \
+      }
+
+// GTMSessionSyncMonitorInternal is a private class that keeps track of the
+// beginning and end of synchronized scopes.
+//
+// This class should not be used directly, but only via the
+// GTMSessionMonitorSynchronized macro.
+@interface GTMSessionSyncMonitorInternal : NSObject
+- (instancetype)initWithSynchronizationObject:(id)object
+                               allowRecursive:(BOOL)allowRecursive
+                                 functionName:(const char *)functionName;
+// Return the names of the functions that hold sync on the object, or nil if none.
++ (NSArray *)functionsHoldingSynchronizationOnObject:(id)object;
+@end
+
+#else
+  #define GTMSessionMonitorSynchronized(obj) do { } while (0)
+  #define GTMSessionMonitorRecursiveSynchronized(obj) do { } while (0)
+  #define GTMSessionCheckSynchronized(obj) do { } while (0)
+  #define GTMSessionCheckNotSynchronized(obj) do { } while (0)
+#endif  // DEBUG && (!defined(NS_BLOCK_ASSERTIONS) || GTMSESSION_ASSERT_AS_LOG)
+#endif  // __OBJC__
+
+
+GTM_ASSUME_NONNULL_END

+ 112 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcherLogging.h

@@ -0,0 +1,112 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "GTMSessionFetcher.h"
+
+// GTM HTTP Logging
+//
+// All traffic using GTMSessionFetcher can be easily logged.  Call
+//
+//   [GTMSessionFetcher setLoggingEnabled:YES];
+//
+// to begin generating log files.
+//
+// Unless explicitly set by the application using +setLoggingDirectory:,
+// logs are put into a default directory, located at:
+//   * macOS: ~/Desktop/GTMHTTPDebugLogs
+//   * iOS simulator: ~/GTMHTTPDebugLogs (in application sandbox)
+//   * iOS device: ~/Documents/GTMHTTPDebugLogs (in application sandbox)
+//
+// Tip: use the Finder's "Sort By Date" to find the most recent logs.
+//
+// Each run of an application gets a separate set of log files.  An html
+// file is generated to simplify browsing the run's http transactions.
+// The html file includes javascript links for inline viewing of uploaded
+// and downloaded data.
+//
+// A symlink is created in the logs folder to simplify finding the html file
+// for the latest run of the application; the symlink is called
+//
+//   AppName_http_log_newest.html
+//
+// For better viewing of XML logs, use Camino or Firefox rather than Safari.
+//
+// Each fetcher may be given a comment to be inserted as a label in the logs,
+// such as
+//   [fetcher setCommentWithFormat:@"retrieve item %@", itemName];
+//
+// Projects may define STRIP_GTM_FETCH_LOGGING to remove logging code.
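+//
+// A minimal sketch (editor's addition), enabling logging at app startup; the
+// custom directory path is illustrative only:
+//
+//   [GTMSessionFetcher setLoggingEnabled:YES];
+//
+//   // Optionally, write logs to an existing custom directory instead:
+//   [GTMSessionFetcher setLoggingDirectory:@"/tmp/MyAppHTTPLogs"];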
+
+#if !STRIP_GTM_FETCH_LOGGING
+
+@interface GTMSessionFetcher (GTMSessionFetcherLogging)
+
+// Note: on macOS the default logs directory is ~/Desktop/GTMHTTPDebugLogs; on
+// iOS simulators it will be the ~/GTMHTTPDebugLogs (in the app sandbox); on
+// iOS devices it will be in ~/Documents/GTMHTTPDebugLogs (in the app sandbox).
+// These directories will be created as needed, and are excluded from backups
+// to iCloud and iTunes.
+//
+// If a custom directory is set, the directory should already exist. It is
+// the application's responsibility to exclude any custom directory from
+// backups, if desired.
++ (void)setLoggingDirectory:(NSString *)path;
++ (NSString *)loggingDirectory;
+
+// client apps can turn logging on and off
++ (void)setLoggingEnabled:(BOOL)isLoggingEnabled;
++ (BOOL)isLoggingEnabled;
+
+// client apps can turn off logging to a file if they want to only check
+// the fetcher's log property
++ (void)setLoggingToFileEnabled:(BOOL)isLoggingToFileEnabled;
++ (BOOL)isLoggingToFileEnabled;
+
+// client apps can optionally specify process name and date string used in
+// log file names
++ (void)setLoggingProcessName:(NSString *)processName;
++ (NSString *)loggingProcessName;
+
++ (void)setLoggingDateStamp:(NSString *)dateStamp;
++ (NSString *)loggingDateStamp;
+
+// client apps can specify the directory for the log for this specific run,
+// typically to match the directory used by another fetcher class, like:
+//
+//   [GTMSessionFetcher setLogDirectoryForCurrentRun:[GTMHTTPFetcher logDirectoryForCurrentRun]];
+//
+// Setting this overrides the logging directory, process name, and date stamp when writing
+// the log file.
++ (void)setLogDirectoryForCurrentRun:(NSString *)logDirectoryForCurrentRun;
++ (NSString *)logDirectoryForCurrentRun;
+
+// Prunes old log directories that have not been modified since the provided date.
+// This will not delete the current run's log directory.
++ (void)deleteLogDirectoriesOlderThanDate:(NSDate *)date;
+
+// internal; called by fetcher
+- (void)logFetchWithError:(NSError *)error;
+- (NSInputStream *)loggedInputStreamForInputStream:(NSInputStream *)inputStream;
+- (GTMSessionFetcherBodyStreamProvider)loggedStreamProviderForStreamProvider:
+    (GTMSessionFetcherBodyStreamProvider)streamProvider;
+
+// internal; accessors useful for viewing logs
++ (NSString *)processNameLogPrefix;
++ (NSString *)symlinkNameSuffix;
++ (NSString *)htmlFileName;
+
+@end
+
+#endif  // !STRIP_GTM_FETCH_LOGGING

+ 193 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionFetcherService.h

@@ -0,0 +1,193 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// For best performance and convenient usage, fetchers should be generated by a common
+// GTMSessionFetcherService instance, like
+//
+//   _fetcherService = [[GTMSessionFetcherService alloc] init];
+//   GTMSessionFetcher* myFirstFetcher = [_fetcherService fetcherWithRequest:request1];
+//   GTMSessionFetcher* mySecondFetcher = [_fetcherService fetcherWithRequest:request2];
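+//
+// A created fetcher is then started directly (editor's sketch;
+// beginFetchWithCompletionHandler: is declared in GTMSessionFetcher.h):
+//
+//   [myFirstFetcher beginFetchWithCompletionHandler:^(NSData *data, NSError *error) {
+//     if (error != nil) { return; }  // fetch failed
+//     // use |data|
+//   }];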
+
+#import "GTMSessionFetcher.h"
+
+GTM_ASSUME_NONNULL_BEGIN
+
+// Notifications.
+
+// This notification indicates a reusable session has become invalid. It is intended mainly for the
+// service's unit tests.
+//
+// The notification object is the fetcher service.
+// The invalid session is provided via the userInfo kGTMSessionFetcherServiceSessionKey key.
+extern NSString *const kGTMSessionFetcherServiceSessionBecameInvalidNotification;
+extern NSString *const kGTMSessionFetcherServiceSessionKey;
+
+@interface GTMSessionFetcherService : NSObject<GTMSessionFetcherServiceProtocol>
+
+// Queues of delayed and running fetchers. Each dictionary contains arrays
+// of GTMSessionFetcher *fetchers, keyed by NSString *host
+@property(atomic, strong, readonly, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, NSArray *) *delayedFetchersByHost;
+@property(atomic, strong, readonly, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, NSArray *) *runningFetchersByHost;
+
+// A max value of 0 means no fetchers should be delayed.
+// The default limit is 10 simultaneous fetchers targeting each host.
+// This does not apply to fetchers whose useBackgroundSession property is YES. Since services are
+// not resurrected on an app relaunch, delayed fetchers would effectively be abandoned.
+@property(atomic, assign) NSUInteger maxRunningFetchersPerHost;
+
+// Properties to be applied to each fetcher; see GTMSessionFetcher.h for descriptions
+@property(atomic, strong, GTM_NULLABLE) NSURLSessionConfiguration *configuration;
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherConfigurationBlock configurationBlock;
+@property(atomic, strong, GTM_NULLABLE) NSHTTPCookieStorage *cookieStorage;
+@property(atomic, strong, GTM_NULL_RESETTABLE) dispatch_queue_t callbackQueue;
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherChallengeBlock challengeBlock;
+@property(atomic, strong, GTM_NULLABLE) NSURLCredential *credential;
+@property(atomic, strong) NSURLCredential *proxyCredential;
+@property(atomic, copy, GTM_NULLABLE) GTM_NSArrayOf(NSString *) *allowedInsecureSchemes;
+@property(atomic, assign) BOOL allowLocalhostRequest;
+@property(atomic, assign) BOOL allowInvalidServerCertificates;
+@property(atomic, assign, getter=isRetryEnabled) BOOL retryEnabled;
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherRetryBlock retryBlock;
+@property(atomic, assign) NSTimeInterval maxRetryInterval;
+@property(atomic, assign) NSTimeInterval minRetryInterval;
+@property(atomic, copy, GTM_NULLABLE) GTM_NSDictionaryOf(NSString *, id) *properties;
+
+#if GTM_BACKGROUND_TASK_FETCHING
+@property(atomic, assign) BOOL skipBackgroundTask;
+#endif
+
+// A default user agent, from GTMFetcherStandardUserAgentString(nil), will be given to each
+// fetcher created by this service unless the request already has a user-agent header set.
+// This default is added starting with builds using the SDKs for OS X 10.11 and iOS 9.
+//
+// To use the configuration's default user agent, set this property to nil.
+@property(atomic, copy, GTM_NULLABLE) NSString *userAgent;
+
+// The authorizer to attach to the created fetchers. If a specific fetcher should
+// not authorize its requests, the fetcher's authorizer property may be set to nil
+// before the fetch begins.
+@property(atomic, strong, GTM_NULLABLE) id<GTMFetcherAuthorizationProtocol> authorizer;
+
+// Delegate queue used by the session when calling back to the fetcher.  The default
+// is the main queue.  Changing this does not affect the queue used to call back to the
+// application; that is specified by the callbackQueue property above.
+@property(atomic, strong, GTM_NULL_RESETTABLE) NSOperationQueue *sessionDelegateQueue;
+
+// When enabled, indicates the same session should be used by subsequent fetchers.
+//
+// This is enabled by default.
+@property(atomic, assign) BOOL reuseSession;
+
+// Sets the delay until an unused session is invalidated.
+// The default interval is 60 seconds.
+//
+// If the interval is set to 0, then any reused session is not invalidated except by
+// explicitly invoking -resetSession.  Be aware that setting the interval to 0 thus
+// causes the session's delegate to be retained until the session is explicitly reset.
+@property(atomic, assign) NSTimeInterval unusedSessionTimeout;
+
+// If reuseSession is enabled, this will force creation of a new session when future
+// fetchers begin.
+- (void)resetSession;
+
+// Create a fetcher
+//
+// These methods will return a fetcher. If successfully created, the connection
+// will hold a strong reference to it for the life of the connection as well,
+// so the caller doesn't have to hold onto the fetcher explicitly unless they
+// want to be able to monitor or cancel it.
+- (GTMSessionFetcher *)fetcherWithRequest:(NSURLRequest *)request;
+- (GTMSessionFetcher *)fetcherWithURL:(NSURL *)requestURL;
+- (GTMSessionFetcher *)fetcherWithURLString:(NSString *)requestURLString;
+
+// Common method for fetcher creation.
+//
+// -fetcherWithRequest:fetcherClass: may be overridden to customize creation of
+// fetchers.  This is the ONLY method in the GTMSessionFetcher library intended to
+// be overridden.
+- (id)fetcherWithRequest:(NSURLRequest *)request
+            fetcherClass:(Class)fetcherClass;
+
+- (BOOL)isDelayingFetcher:(GTMSessionFetcher *)fetcher;
+
+- (NSUInteger)numberOfFetchers;        // running + delayed fetchers
+- (NSUInteger)numberOfRunningFetchers;
+- (NSUInteger)numberOfDelayedFetchers;
+
+// Return a list of all running or delayed fetchers. This includes fetchers created
+// by the service which have been started and have not yet stopped.
+//
+// Returns an array of fetcher objects, or nil if none.
+- (GTM_NULLABLE GTM_NSArrayOf(GTMSessionFetcher *) *)issuedFetchers;
+
+// Search for running or delayed fetchers with the specified URL.
+//
+// Returns an array of fetcher objects found, or nil if none found.
+- (GTM_NULLABLE GTM_NSArrayOf(GTMSessionFetcher *) *)issuedFetchersWithRequestURL:(NSURL *)requestURL;
+
+- (void)stopAllFetchers;
+
+// Methods for use by the fetcher class only.
+- (GTM_NULLABLE NSURLSession *)session;
+- (GTM_NULLABLE NSURLSession *)sessionForFetcherCreation;
+- (GTM_NULLABLE id<NSURLSessionDelegate>)sessionDelegate;
+- (GTM_NULLABLE NSDate *)stoppedAllFetchersDate;
+
+// The testBlock can inspect its fetcher parameter's request property to
+// determine which fetcher is being faked.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionFetcherTestBlock testBlock;
+
+@end
+
+@interface GTMSessionFetcherService (TestingSupport)
+
+// Convenience methods to create a fetcher service for testing.
+//
+// Fetchers generated by this mock fetcher service will not perform any
+// network operation, but will invoke callbacks and provide the supplied data
+// or error to the completion handler.
+//
+// You can make more customized mocks by setting the test block property of the service
+// or fetcher; the test block can inspect the fetcher's request or other properties.
+//
+// See the description of the testBlock property above.
++ (instancetype)mockFetcherServiceWithFakedData:(GTM_NULLABLE NSData *)fakedDataOrNil
+                                     fakedError:(GTM_NULLABLE NSError *)fakedErrorOrNil;
++ (instancetype)mockFetcherServiceWithFakedData:(GTM_NULLABLE NSData *)fakedDataOrNil
+                                  fakedResponse:(NSHTTPURLResponse *)fakedResponse
+                                     fakedError:(GTM_NULLABLE NSError *)fakedErrorOrNil;
+
+// Spin the run loop and discard events (or, if not on the main thread, just sleep the thread)
+// until all running and delayed fetchers have completed.
+//
+// This is only for use in testing or in tools without a user interface.
+//
+// Synchronous fetches should never be done by shipping apps; they are
+// sufficient reason for rejection from the app store.
+//
+// Returns NO if timed out.
+- (BOOL)waitForCompletionOfAllFetchersWithTimeout:(NSTimeInterval)timeoutInSeconds;
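+
+// A test sketch (editor's addition; the URL and faked payload are illustrative
+// only, and |service| performs no real network traffic):
+//
+//   NSData *fakedData = [@"{}" dataUsingEncoding:NSUTF8StringEncoding];
+//   GTMSessionFetcherService *service =
+//       [GTMSessionFetcherService mockFetcherServiceWithFakedData:fakedData
+//                                                      fakedError:nil];
+//   GTMSessionFetcher *fetcher =
+//       [service fetcherWithURLString:@"https://example.invalid/item"];
+//   [fetcher beginFetchWithCompletionHandler:^(NSData *data, NSError *error) {
+//     // |data| is fakedData; no request was sent.
+//   }];
+//   [service waitForCompletionOfAllFetchersWithTimeout:10];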
+
+@end
+
+@interface GTMSessionFetcherService (BackwardsCompatibilityOnly)
+
+// Clients using GTMSessionFetcher should set the cookie storage explicitly themselves.
+// This method is just for compatibility with the old fetcher.
+@property(atomic, assign) NSInteger cookieStorageMethod;
+
+@end
+
+GTM_ASSUME_NONNULL_END

+ 166 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Headers/GTMSessionUploadFetcher.h

@@ -0,0 +1,166 @@
+/* Copyright 2014 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// GTMSessionUploadFetcher implements Google's resumable upload protocol.
+
+//
+// This subclass of GTMSessionFetcher simulates the series of fetches
+// needed for chunked upload as a single fetch operation.
+//
+// Protocol document:  TBD
+//
+// To the client, the only fetcher that exists is this class; the subsidiary
+// fetchers needed for uploading chunks are not visible (though the most recent
+// chunk fetcher may be accessed via the -activeFetcher or -chunkFetcher methods, and
+// -responseHeaders and -statusCode reflect results from the most recent chunk
+// fetcher.)
+//
+// Chunk fetchers are discarded as soon as they have completed.
+//
+// The protocol also allows a cancellation notification request to be sent to the
+// server so it can discard the data uploaded so far; this request is sent
+// automatically upon calling stopFetching if the upload has already started.
+//
+// Note: Unlike the fetcher superclass, the methods of GTMSessionUploadFetcher should
+// only be used from the main thread until further work is done to make this subclass
+// thread-safe.
+
+#import "GTMSessionFetcher.h"
+#import "GTMSessionFetcherService.h"
+
+GTM_ASSUME_NONNULL_BEGIN
+
+// The value to use for file size parameters when the file size is not yet known.
+extern int64_t const kGTMSessionUploadFetcherUnknownFileSize;
+
+// Unless an application knows it needs a smaller chunk size, it should use the standard
+// chunk size, which sends the entire file as a single chunk to minimize upload overhead.
+// Setting an explicit chunk size that comfortably fits in memory is advisable for large
+// uploads.
+extern int64_t const kGTMSessionUploadFetcherStandardChunkSize;
+
+// When uploading requires data buffer allocations (such as uploading from an NSData or
+// an NSFileHandle) this is the maximum buffer size that will be created by the fetcher.
+extern int64_t const kGTMSessionUploadFetcherMaximumDemandBufferSize;
+
+// Notification that the upload location URL was provided by the server.
+extern NSString *const kGTMSessionFetcherUploadLocationObtainedNotification;
+
+// Block to provide data during uploads.
+//
+// Response data may be allocated with dataWithBytesNoCopy:length:freeWhenDone: for efficiency,
+// and released after the response block returns.
+//
+// If the length of the file being uploaded is unknown or already set, send
+// kGTMSessionUploadFetcherUnknownFileSize for |fullUploadLength|. Otherwise, set |fullUploadLength|
+// to its proper value.
+//
+// Pass nil as the data (and optionally an NSError) for a failure.
+typedef void (^GTMSessionUploadFetcherDataProviderResponse)(NSData * GTM_NULLABLE_TYPE data,
+                                                            int64_t fullUploadLength,
+                                                            NSError * GTM_NULLABLE_TYPE error);
+// Do not call the response with an NSData object with less data than the requested length unless
+// you are passing the fullUploadLength to the fetcher for the first time and it is the last chunk
+// of data in the file being uploaded.
+typedef void (^GTMSessionUploadFetcherDataProvider)(int64_t offset, int64_t length,
+    GTMSessionUploadFetcherDataProviderResponse response);
+
+// Block to be notified about the final status of the cancellation request started in stopFetching.
+//
+// |fetcher| will be the cancel request that was sent to the server, or nil if stopFetching is not
+// going to send a cancel request. If |fetcher| is provided, the other parameters correspond to the
+// completion handler of the cancellation request fetcher.
+typedef void (^GTMSessionUploadFetcherCancellationHandler)(
+    GTMSessionFetcher * GTM_NULLABLE_TYPE fetcher,
+    NSData * GTM_NULLABLE_TYPE data,
+    NSError * GTM_NULLABLE_TYPE error);
+
+@interface GTMSessionUploadFetcher : GTMSessionFetcher
+
+// Create an upload fetcher specifying either the request or the resume location URL,
+// then set an upload data source using one of these:
+//
+//   setUploadFileURL:
+//   setUploadDataLength:provider:
+//   setUploadFileHandle:
+//   setUploadData:
+
++ (instancetype)uploadFetcherWithRequest:(NSURLRequest *)request
+                          uploadMIMEType:(NSString *)uploadMIMEType
+                               chunkSize:(int64_t)chunkSize
+                          fetcherService:(GTM_NULLABLE GTMSessionFetcherService *)fetcherServiceOrNil;
+
++ (instancetype)uploadFetcherWithLocation:(NSURL * GTM_NULLABLE_TYPE)uploadLocationURL
+                           uploadMIMEType:(NSString *)uploadMIMEType
+                                chunkSize:(int64_t)chunkSize
+                           fetcherService:(GTM_NULLABLE GTMSessionFetcherService *)fetcherServiceOrNil;
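+
+// For example (editor's sketch; the URL, MIME type, and file path are
+// illustrative only):
+//
+//   NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:
+//       [NSURL URLWithString:@"https://example.invalid/upload"]];
+//   GTMSessionUploadFetcher *uploadFetcher =
+//       [GTMSessionUploadFetcher uploadFetcherWithRequest:request
+//                                          uploadMIMEType:@"video/mp4"
+//                                               chunkSize:kGTMSessionUploadFetcherStandardChunkSize
+//                                          fetcherService:nil];
+//   uploadFetcher.uploadFileURL = [NSURL fileURLWithPath:@"/path/to/movie.mp4"];
+//   [uploadFetcher beginFetchWithCompletionHandler:^(NSData *data, NSError *error) {
+//     // upload completed or failed
+//   }];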
+
+// Allows dataProviders for files of unknown length. Pass kGTMSessionUploadFetcherUnknownFileSize as
+// |fullLength| if the length is unknown.
+- (void)setUploadDataLength:(int64_t)fullLength
+                   provider:(GTM_NULLABLE GTMSessionUploadFetcherDataProvider)block;
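+
+// For example (editor's sketch), providing chunks from an in-memory NSData
+// |fullData| whose full length was passed as |fullLength| above, so the
+// provider reports kGTMSessionUploadFetcherUnknownFileSize:
+//
+//   [uploadFetcher setUploadDataLength:(int64_t)fullData.length
+//                             provider:^(int64_t offset, int64_t length,
+//                                        GTMSessionUploadFetcherDataProviderResponse response) {
+//     int64_t count = MIN(length, (int64_t)fullData.length - offset);
+//     NSData *chunk = [fullData subdataWithRange:
+//         NSMakeRange((NSUInteger)offset, (NSUInteger)count)];
+//     response(chunk, kGTMSessionUploadFetcherUnknownFileSize, nil);
+//   }];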
+
++ (NSArray *)uploadFetchersForBackgroundSessions;
++ (GTM_NULLABLE instancetype)uploadFetcherForSessionIdentifier:(NSString *)sessionIdentifier;
+
+- (void)pauseFetching;
+- (void)resumeFetching;
+- (BOOL)isPaused;
+
+@property(atomic, strong, GTM_NULLABLE) NSURL *uploadLocationURL;
+@property(atomic, strong, GTM_NULLABLE) NSData *uploadData;
+@property(atomic, strong, GTM_NULLABLE) NSURL *uploadFileURL;
+@property(atomic, strong, GTM_NULLABLE) NSFileHandle *uploadFileHandle;
+@property(atomic, copy, readonly, GTM_NULLABLE) GTMSessionUploadFetcherDataProvider uploadDataProvider;
+@property(atomic, copy) NSString *uploadMIMEType;
+@property(atomic, assign) int64_t chunkSize;
+@property(atomic, readonly, assign) int64_t currentOffset;
+
+// The fetcher for the current data chunk, if any
+@property(atomic, strong, GTM_NULLABLE) GTMSessionFetcher *chunkFetcher;
+
+// The active fetcher is the current chunk fetcher, or the upload fetcher itself
+// if no chunk fetcher has yet been created.
+@property(atomic, readonly) GTMSessionFetcher *activeFetcher;
+
+// The last request made by an active fetcher.  Useful for testing.
+@property(atomic, readonly, GTM_NULLABLE) NSURLRequest *lastChunkRequest;
+
+// The status code from the most recently-completed fetch.
+@property(atomic, assign) NSInteger statusCode;
+
+// Invoked as part of the stop fetching process. Invoked immediately if there is no upload in
+// progress, otherwise invoked with the results of the attempt to notify the server that the
+// upload will not continue.
+//
+// Unlike other callbacks, since this is related specifically to the stopFetching flow it is not
+// cleared by stopFetching. It will instead clear itself after it is invoked or if the completion
+// has occurred before stopFetching is called.
+@property(atomic, copy, GTM_NULLABLE) GTMSessionUploadFetcherCancellationHandler
+    cancellationHandler;
+
+// Exposed for testing only.
+@property(atomic, readonly, GTM_NULLABLE) dispatch_queue_t delegateCallbackQueue;
+@property(atomic, readonly, GTM_NULLABLE) GTMSessionFetcherCompletionHandler delegateCompletionHandler;
+
+@end
+
+@interface GTMSessionFetcher (GTMSessionUploadFetcherMethods)
+
+@property(readonly, GTM_NULLABLE) GTMSessionUploadFetcher *parentUploadFetcher;
+
+@end
+
+GTM_ASSUME_NONNULL_END

+ 6 - 0
Libraries external/Firebase/MLVision/GTMSessionFetcher.framework/Modules/module.modulemap

@@ -0,0 +1,6 @@
+framework module GTMSessionFetcher {
+  umbrella header "GTMSessionFetcher.h"
+  export *
+  module * { export * }
+  link framework "Security"
+}

BIN
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/GoogleAPIClientForREST


+ 29 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBase64.h

@@ -0,0 +1,29 @@
+/* Copyright (c) 2012 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+NSData * _Nullable GTLRDecodeBase64(NSString * _Nullable base64Str);
+NSString * _Nullable GTLREncodeBase64(NSData * _Nullable data);
+
+// "Web-safe" encoding substitutes - and _ for + and / in the encoding table,
+// per http://www.ietf.org/rfc/rfc4648.txt section 5.
+
+NSData * _Nullable GTLRDecodeWebSafeBase64(NSString * _Nullable base64Str);
+NSString * _Nullable GTLREncodeWebSafeBase64(NSData * _Nullable data);
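+
+// Example (editor's sketch):
+//
+//   NSData *raw = [@"hello" dataUsingEncoding:NSUTF8StringEncoding];
+//   NSString *webSafe = GTLREncodeWebSafeBase64(raw);  // uses - and _ per RFC 4648 section 5
+//   NSData *roundTrip = GTLRDecodeWebSafeBase64(webSafe);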
+
+NS_ASSUME_NONNULL_END

+ 85 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBatchQuery.h

@@ -0,0 +1,85 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//  Batch query documentation:
+//  https://github.com/google/google-api-objectivec-client-for-rest/wiki#batch-operations
+
+#import "GTLRQuery.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface GTLRBatchQuery : NSObject <GTLRQueryProtocol>
+
+/**
+ *  Queries included in this batch.  Each query should have a unique @c requestID.
+ */
+@property(atomic, copy, nullable) NSArray<GTLRQuery *> *queries;
+
+/**
+ *  Flag indicating if query execution should skip authorization. Defaults to NO.
+ */
+@property(atomic, assign) BOOL shouldSkipAuthorization;
+
+/**
+ *  Any additional HTTP headers for this batch.
+ *
+ *  These headers override the same keys from the service object's
+ *  @c additionalHTTPHeaders.
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalHTTPHeaders;
+
+/**
+ *  Any additional URL query parameters to add to the batch query.
+ *
+ *  These query parameters override the same keys from the service object's
+ *  @c additionalURLQueryParameters.
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalURLQueryParameters;
+
+/**
+ *  The batch request multipart boundary, once determined.
+ */
+@property(atomic, copy, nullable) NSString *boundary;
+
+/**
+ *  The brief string to identify this query in @c GTMSessionFetcher http logs.
+ *
+ *  The default logging name for batch requests includes the API method names.
+ */
+@property(atomic, copy, nullable) NSString *loggingName;
+
+/**
+ *  Constructor for a batch query, for use with @c addQuery:
+ */
++ (instancetype)batchQuery;
+
+/**
+ *  Constructor for a batch query, from an array of @c GTLRQuery objects.
+ */
++ (instancetype)batchQueryWithQueries:(NSArray<GTLRQuery *> *)array;
+
+/**
+ *  Add a single @c GTLRQuery to the batch.
+ */
+- (void)addQuery:(GTLRQuery *)query;
+
+/**
+ *  Search the batch for a query with the specified ID.
+ */
+- (nullable GTLRQuery *)queryForRequestID:(NSString *)requestID;
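+
+// Example (editor's sketch; queryA and queryB stand for previously built
+// GTLRQuery instances, and executeQuery:completionHandler: is declared on
+// GTLRService in GTLRService.h):
+//
+//   GTLRBatchQuery *batch = [GTLRBatchQuery batchQuery];
+//   [batch addQuery:queryA];  // each query needs a unique requestID
+//   [batch addQuery:queryB];
+//   [service executeQuery:batch
+//       completionHandler:^(GTLRServiceTicket *ticket, id result, NSError *error) {
+//     GTLRBatchResult *batchResult = (GTLRBatchResult *)result;
+//     // batchResult.successes / batchResult.failures are keyed by request ID
+//   }];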
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 78 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRBatchResult.h

@@ -0,0 +1,78 @@
+/* Copyright (c) 2011 Google Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#import "GTLRObject.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class GTLRErrorObject;
+
+/**
+ *  A batch result includes a dictionary of successes, a dictionary of failures, and a dictionary of
+ *  HTTP response headers.
+ *
+ *  Dictionary keys are request ID strings; dictionary values are @c GTLRObject for
+ *  successes, @c GTLRErrorObject for failures, @c NSDictionary for responseHeaders.
+ *
+ *  For successes with no returned object (such as from delete operations),
+ *  the object for the dictionary entry is @c NSNull.
+ *
+ *  The original query for each result is available from the service ticket, as shown in
+ *  the code snippet here.
+ *
+ *  When the queries in the batch are unrelated, adding a @c completionBlock to each of
+ *  the queries may be a simpler way to handle the batch results.
+ *
+ *  @code
+ *  NSDictionary *successes = batchResults.successes;
+ *  for (NSString *requestID in successes) {
+ *    GTLRObject *obj = successes[requestID];
+ *    GTLRQuery *query = [ticket queryForRequestID:requestID];
+ *    NSLog(@"Query %@ returned object %@", query, obj);
+ *  }
+ *
+ *  NSDictionary *failures = batchResults.failures;
+ *  for (NSString *requestID in failures) {
+ *    GTLRErrorObject *errorObj = failures[requestID];
+ *    GTLRQuery *query = [ticket queryForRequestID:requestID];
+ *    NSLog(@"Query %@ failed with error %@", query, errorObj);
+ *  }
+ *  @endcode
+ */
+@interface GTLRBatchResult : GTLRObject
+
+/**
+ *  Object results of successful queries in the batch, keyed by request ID.
+ *
+ *  Queries which do not return an object when successful have a @c NSNull value.
+ */
+@property(atomic, strong, nullable) NSDictionary<NSString *, __kindof GTLRObject *> *successes;
+
+/**
+ *  Object results of unsuccessful queries in the batch, keyed by request ID.
+ */
+@property(atomic, strong, nullable) NSDictionary<NSString *, GTLRErrorObject *> *failures;
+
+/**
+ * Any HTTP response headers that were returned for a query request. Headers are optional,
+ * so not all queries will have them. Response headers for each query request are stored
+ * in a dictionary keyed by request ID.
+ */
+@property(atomic, strong, nullable)
+    NSDictionary<NSString *, NSDictionary *> *responseHeaders;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 115 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDateTime.h

@@ -0,0 +1,115 @@
+/* Copyright (c) 2011 Google Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#import <Foundation/Foundation.h>
+#import "GTLRDefines.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  An immutable class representing a date and optionally a time in UTC.
+ */
+@interface GTLRDateTime : NSObject <NSCopying>
+
+/**
+ *  Constructor from a string representation.
+ */
++ (nullable instancetype)dateTimeWithRFC3339String:(nullable NSString *)str;
+
+/**
+ *  Constructor from a date and time representation.
+ */
++ (instancetype)dateTimeWithDate:(NSDate *)date;
+
+/**
+ *  Constructor from a date and time representation, along with an offset
+ *  minutes value used when creating a RFC3339 string representation.
+ *
+ *  The date value is independent of time zone; the offset affects how the
+ *  date will be rendered as a string.
+ *
+ *  The offsetMinutes may be initialized from an NSTimeZone as
+ *  (timeZone.secondsFromGMT / 60).
+ */
++ (instancetype)dateTimeWithDate:(NSDate *)date
+                   offsetMinutes:(NSInteger)offsetMinutes;
+
+/**
+ *  Constructor from a date for an all-day event.
+ *
+ *  Use this constructor to create a @c GTLRDateTime that is "date only".
+ *
+ *  @note @c hasTime will be set to NO.
+ */
++ (instancetype)dateTimeForAllDayWithDate:(NSDate *)date;
+
+/**
+ *  Constructor from date components.
+ */
++ (instancetype)dateTimeWithDateComponents:(NSDateComponents *)date;
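+
+// Example (editor's sketch):
+//
+//   GTLRDateTime *dt =
+//       [GTLRDateTime dateTimeWithRFC3339String:@"2019-03-01T12:00:00-08:00"];
+//   NSDate *when = dt.date;                  // the same instant, as an NSDate
+//   NSString *roundTrip = dt.RFC3339String;  // rendered with the parsed offset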
+
+/**
+ *  The represented date and time.
+ *
+ *  If @c hasTime is NO, the time is set to noon GMT so the date is valid for all time zones.
+ */
+@property(nonatomic, readonly) NSDate *date;
+
+/**
+ *  The date and time as a RFC3339 string representation.
+ */
+@property(nonatomic, readonly) NSString *RFC3339String;
+
+/**
+ *  The date and time as a RFC3339 string representation.
+ *
+ *  This returns the same string as @c RFC3339String.
+ */
+@property(nonatomic, readonly) NSString *stringValue;
+
+/**
+ *  The represented date and time as date components.
+ */
+@property(nonatomic, readonly, copy) NSDateComponents *dateComponents;
+
+/**
+ *  The fraction of seconds represented, 0-999.
+ */
+@property(nonatomic, readonly) NSInteger milliseconds;
+
+/**
+ *  The time offset displayed in the string representation, if any.
+ *
+ *  If the offset is not nil, the date and time will be rendered as a string
+ *  for the time zone indicated by the offset.
+ *
+ *  An app may create an NSTimeZone for this with
+ *  [NSTimeZone timeZoneForSecondsFromGMT:(offsetMinutes.integerValue * 60)]
+ */
+@property(nonatomic, readonly, nullable) NSNumber *offsetMinutes;
+
+/**
+ *  Flag indicating if the object represents date only, or date with time.
+ */
+@property(nonatomic, readonly) BOOL hasTime;
+
+/**
+ *  The calendar used by this class, Gregorian and UTC.
+ */
++ (NSCalendar *)calendar;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 109 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDefines.h

@@ -0,0 +1,109 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// GTLRDefines.h
+//
+
+// Ensure Apple's conditionals we depend on are defined.
+#import <TargetConditionals.h>
+#import <AvailabilityMacros.h>
+
+// These can be redefined if you are prefixing symbols, so that the prefixed
+// names are also used in strings. Something like:
+//   #define _HELPER(x) "MyPrefix" #x
+//   #define GTLR_CLASSNAME_STR(x) @_HELPER(x)
+//   #define GTLR_CLASSNAME_CSTR(x) _HELPER(x)
+#ifndef GTLR_CLASSNAME_STR
+  #define _GTLR_CLASSNAME_HELPER(x) #x
+  #define GTLR_CLASSNAME_STR(x) @_GTLR_CLASSNAME_HELPER(x)
+  #define GTLR_CLASSNAME_CSTR(x) _GTLR_CLASSNAME_HELPER(x)
+#endif
+
+// Provide a common definition for externing constants/functions
+#if defined(__cplusplus)
+  #define GTLR_EXTERN extern "C"
+#else
+  #define GTLR_EXTERN extern
+#endif
+
+//
+// GTLR_ASSERT defaults to bridging to NSAssert. This macro exists just in case
+// it needs to be remapped.
+// GTLR_DEBUG_ASSERT is similar, but compiles in only for debug builds
+//
+
+#ifndef GTLR_ASSERT
+  // NSCAssert to avoid capturing self if used in a block.
+  #define GTLR_ASSERT(condition, ...) NSCAssert(condition, __VA_ARGS__)
+#endif // GTLR_ASSERT
+
+#ifndef GTLR_DEBUG_ASSERT
+  #if DEBUG && !defined(NS_BLOCK_ASSERTIONS)
+    #define GTLR_DEBUG_ASSERT(condition, ...) GTLR_ASSERT(condition, __VA_ARGS__)
+  #elif DEBUG
+    // In DEBUG builds with assertions blocked, log to avoid unused variable warnings.
+    #define GTLR_DEBUG_ASSERT(condition, ...) if (!(condition)) { NSLog(__VA_ARGS__); }
+  #else
+    #define GTLR_DEBUG_ASSERT(condition, ...) do { } while (0)
+  #endif
+#endif
+
+#ifndef GTLR_DEBUG_LOG
+  #if DEBUG
+    #define GTLR_DEBUG_LOG(...) NSLog(__VA_ARGS__)
+  #else
+    #define GTLR_DEBUG_LOG(...) do { } while (0)
+  #endif
+#endif
+
+#ifndef GTLR_DEBUG_ASSERT_CURRENT_QUEUE
+  #define GTLR_ASSERT_CURRENT_QUEUE_DEBUG(targetQueue)                  \
+      GTLR_DEBUG_ASSERT(0 == strcmp(GTLR_QUEUE_NAME(targetQueue),       \
+                        GTLR_QUEUE_NAME(DISPATCH_CURRENT_QUEUE_LABEL)), \
+          @"Current queue is %s (expected %s)",                         \
+          GTLR_QUEUE_NAME(DISPATCH_CURRENT_QUEUE_LABEL),                \
+          GTLR_QUEUE_NAME(targetQueue))
+
+  #define GTLR_QUEUE_NAME(queue) \
+      (strlen(dispatch_queue_get_label(queue)) > 0 ? dispatch_queue_get_label(queue) : "unnamed")
+#endif  // GTLR_DEBUG_ASSERT_CURRENT_QUEUE
+
+// Sanity check the min versions.
+
+#if (defined(TARGET_OS_TV) && TARGET_OS_TV) || (defined(TARGET_OS_WATCH) && TARGET_OS_WATCH)
+  // No min checks for these two.
+#elif TARGET_OS_IPHONE
+  #if !defined(__IPHONE_9_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_9_0)
+    #error "This project expects to be compiled with the iOS 9.0 SDK (or later)."
+  #endif
+  #if __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_7_0
+    #error "The minimum supported iOS version is 7.0."
+  #endif
+#elif TARGET_OS_MAC
+  #if !defined(MAC_OS_X_VERSION_10_10) || (MAC_OS_X_VERSION_MAX_ALLOWED < MAC_OS_X_VERSION_10_10)
+    #error "This project expects to be compiled with the OS X 10.10 SDK (or later)."
+  #endif
+  #if MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_9
+    #error "The minimum supported OS X version is 10.9."
+  #endif
+#else
+  #error "Unknown target platform."
+#endif
+
+// Version marker used to validate the generated sources against the library
+// version. This will be changed any time the library makes a change that means
+// past sources need to be regenerated.
+#define GTLR_RUNTIME_VERSION 3000

+ 83 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRDuration.h

@@ -0,0 +1,83 @@
+/* Copyright (c) 2016 Google Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#import <Foundation/Foundation.h>
+#import "GTLRDefines.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  An immutable class representing a string data type 'google-duration'.
+ *  It is based off the protocol buffers definition:
+ *  https://github.com/google/protobuf/blob/master/src/google/protobuf/duration.proto
+ */
+@interface GTLRDuration : NSObject <NSCopying>
+
+/**
+ *  Signed seconds of the span of time. Must be from -315,576,000,000
+ *  to +315,576,000,000 inclusive.
+ **/
+@property(nonatomic, readonly) int64_t seconds;
+
+/**
+ *  Signed fractions of a second at nanosecond resolution of the span
+ *  of time. Durations less than one second are represented with a 0
+ *  `seconds` field and a positive or negative `nanos` field. For durations
+ *  of one second or more, a non-zero value for the `nanos` field must be
+ *  of the same sign as the `seconds` field. Must be from -999,999,999
+ *  to +999,999,999 inclusive.
+ **/
+@property(nonatomic, readonly) int32_t nanos;
+
+/**
+ *  This duration expressed as an NSTimeInterval.
+ *
+ *  @note: Not all seconds/nanos combinations can be represented in an
+ *  NSTimeInterval, so this could be a lossy transform.
+ **/
+@property(nonatomic, readonly) NSTimeInterval timeInterval;
+
+/**
+ * Returns the string form used to send this data type in a JSON payload.
+ */
+@property(nonatomic, readonly) NSString *jsonString;
+
+/**
+ *  Constructor for a new duration with the given seconds and nanoseconds.
+ *
+ *  Will fail if seconds/nanos differ in sign or if nanos is more than one
+ *  second.
+ **/
++ (nullable instancetype)durationWithSeconds:(int64_t)seconds
+                                       nanos:(int32_t)nanos;
+
+/**
+ *  Constructor for a new duration from the given string form.
+ *
+ *  Will return nil if jsonString is invalid.
+ **/
++ (nullable instancetype)durationWithJSONString:(nullable NSString *)jsonString;
+
+/**
+ *  Constructor for a new duration from the NSTimeInterval.
+ *
+ *  @note NSTimeInterval doesn't always express things as exactly as one might
+ *  expect, so converting to integer seconds & nanos can reveal this.
+ **/
++ (instancetype)durationWithTimeInterval:(NSTimeInterval)timeInterval;
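+
+// Example (editor's sketch; the JSON form is decimal seconds with an "s"
+// suffix, per the google-duration definition referenced above):
+//
+//   GTLRDuration *duration = [GTLRDuration durationWithJSONString:@"1.5s"];
+//   // duration.seconds == 1, duration.nanos == 500000000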
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 116 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRErrorObject.h

@@ -0,0 +1,116 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "GTLRObject.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class GTLRErrorObjectErrorItem;
+@class GTLRErrorObjectDetail;
+
+/**
+ *  This class wraps JSON responses (both V1 and V2 of Google JSON errors) and NSErrors.
+ *
+ *  A GTLRErrorObject can be created using +objectWithJSON: or +objectWithFoundationError:
+ */
+@interface GTLRErrorObject : GTLRObject
+
+/**
+ *  Convenience method for creating an error object from an NSError.
+ *
+ *  @param error The @c NSError to be encapsulated by the @c GTLRErrorObject
+ *
+ *  @return A @c GTLRErrorObject wrapping the NSError.
+ */
++ (instancetype)objectWithFoundationError:(NSError *)error;
+
+/**
+ *  Convenience utility for extracting the GTLRErrorObject that was used to create an NSError.
+ *
+ *  @param foundationError The NSError that may have been obtained from a GTLRErrorObject.
+ *
+ *  @return The GTLRErrorObject, nil if the error was not originally from a GTLRErrorObject.
+ */
++ (nullable GTLRErrorObject *)underlyingObjectForError:(NSError *)foundationError;
+
+//
+// V1 & V2 properties.
+//
+
+/**
+ *  The numeric error code.
+ */
+@property(nonatomic, strong, nullable) NSNumber *code;
+
+/**
+ *  An error message string, typically provided by the API server.  This is not localized,
+ *  and its reliability depends on the API server.
+ */
+@property(nonatomic, strong, nullable) NSString *message;
+
+//
+// V1 properties.
+//
+
+/**
+ *  Underlying errors that occurred on the server.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRErrorObjectErrorItem *> *errors;
+
+//
+// V2 properties
+//
+
+/**
+ *  A status error string, defined by the API server, such as "NOT_FOUND".
+ */
+@property(nonatomic, strong, nullable) NSString *status;
+
+/**
+ *  Additional diagnostic error details provided by the API server.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRErrorObjectDetail *> *details;
+
+/**
+ *  An NSError, either underlying the error object or manufactured from the error object's
+ *  properties.
+ */
+@property(nonatomic, readonly) NSError *foundationError;
+
+@end
+
+/**
+ *  Class representing the items of the "errors" array inside the Google V1 error JSON.
+ *
+ *  Client applications should not rely on the property values of these items.
+ */
+@interface GTLRErrorObjectErrorItem : GTLRObject
+@property(nonatomic, strong, nullable) NSString *domain;
+@property(nonatomic, strong, nullable) NSString *reason;
+@property(nonatomic, strong, nullable) NSString *message;
+@property(nonatomic, strong, nullable) NSString *location;
+@end
+
+/**
+ *  Class representing the items of the "details" array inside the Google V2 error JSON.
+ *
+ *  Client applications should not rely on the property values of these items.
+ */
+@interface GTLRErrorObjectDetail : GTLRObject
+@property(nonatomic, strong, nullable) NSString *type;
+@property(nonatomic, strong, nullable) NSString *detail;
+@end
+
+NS_ASSUME_NONNULL_END

+ 34 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRFramework.h

@@ -0,0 +1,34 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "GTLRDefines.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Returns the version of the framework.  Major and minor should
+// match the bundle version in the Info.plist file.
+//
+// Pass NULL to ignore any of the parameters.
+
+void GTLRFrameworkVersion(NSUInteger * _Nullable major,
+                          NSUInteger * _Nullable minor,
+                          NSUInteger * _Nullable release);
+
+// Returns the version in @"a.b" or @"a.b.c" format
+NSString *GTLRFrameworkVersionString(void);
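+
+// Example (editor's sketch):
+//
+//   NSUInteger major = 0, minor = 0, release = 0;
+//   GTLRFrameworkVersion(&major, &minor, &release);
+//   NSLog(@"GTLR framework %@", GTLRFrameworkVersionString());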
+
+NS_ASSUME_NONNULL_END

+ 317 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRObject.h

@@ -0,0 +1,317 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// GTLRObject documentation:
+// https://github.com/google/google-api-objectivec-client-for-rest/wiki#objects-and-queries
+
+#import <Foundation/Foundation.h>
+
+#import "GTLRDefines.h"
+#import "GTLRDateTime.h"
+#import "GTLRDuration.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class GTLRObject;
+
+/**
+ *  Protocol that can be implemented to provide custom logic for what class
+ *  should be created out of the given JSON.
+ */
+@protocol GTLRObjectClassResolver <NSObject>
+- (Class)classForJSON:(NSDictionary *)json
+         defaultClass:(Class)defaultClass;
+@end
+
+/**
+ *  Standard GTLRObjectClassResolver used by the core library.
+ */
+@interface GTLRObjectClassResolver : NSObject<GTLRObjectClassResolver>
+
+/**
+ *  Returns a resolver that will look up the 'kind' properties to find classes
+ *  based on the JSON.
+ *
+ *  The generated service classes provide a +kindStringToClassMap method for any
+ *  mappings that were found from discovery when generating the service.
+ */
++ (instancetype)resolverWithKindMap:(NSDictionary<NSString *, Class> *)kindStringToClassMap;
+
+/**
+ *  Returns a resolver that will look up the 'kind' properties to find classes
+ *  based on the JSON and then applies mapping of surrogate classes to swap out
+ *  specific classes.
+ *
+ *  Surrogates are subclasses to be instantiated instead of standard classes
+ *  when creating objects from the JSON. For example, this code will, for one query's
+ *  execution, swap a service's default resolver for one that will then use
+ *  MyCalendarEventSubclass instead of GTLRCalendarEvent and
+ *  MyCalendarReminderSubclass instead of GTLRCalendarReminder.
+ *
+ * @code
+ *  NSDictionary *surrogates = @{
 *    [GTLRCalendarEvent class] : [MyCalendarEventSubclass class],
+ *    [GTLRCalendarReminder class] : [MyCalendarReminderSubclass class],
+ *  };
+ *  NSDictionary *serviceKindMap = [[calendarService class] kindStringToClassMap];
+ *  GTLRObjectClassResolver *updatedResolver =
+ *    [GTLRObjectClassResolver resolverWithKindMap:serviceKindMap
+ *                                       surrogates:surrogates];
+ *  query.executionParameters.objectClassResolver = updatedResolver;
+ * @endcode
+ *
+ * @note To install surrogates for all queries executed by the service, use
+ *       the service's @c -setSurrogates method.
+ */
++ (instancetype)resolverWithKindMap:(NSDictionary<NSString *, Class> *)kindStringToClassMap
+                         surrogates:(NSDictionary<Class, Class> *)surrogates;
+
+@end
+
+/**
+ * @c GTLRObject serves as the common superclass for classes wrapping JSON, errors, and other data
+ * passed in server requests and responses.
+ *
+ * @note This class is @em not safe for simultaneous use from multiple threads. Applications should
+ *       serialize or protect access to a @c GTLRObject instance as they would for any standard
+ *       Cocoa mutable container.
+ */
+@interface GTLRObject : NSObject <NSCopying, NSSecureCoding>
+
+/**
+ *  The JSON underlying the property values for this object.
+ *
+ *  The JSON should be accessed or set using the generated properties of a
+ *  class derived from GTLRObject or with the methods @c setJSONValue:forKey:
+ *  and @c JSONValueForKey:
+ *
+ *  @note: Applications should use @c additionalPropertyForName: when accessing
+ *         API object properties that do not have generated @c \@property accessors.
+ */
+@property(nonatomic, strong, nullable) NSMutableDictionary *JSON;
+
+/**
+ *  A dictionary retained by the object for the convenience of the client application.
+ *
+ *  A client application may use this to retain any dictionary.
+ *
+ *  The values of the user properties dictionary will not be sent to the server during
+ *  query execution, and will not be copied by NSCopying or encoded by NSSecureCoding.
+ */
+@property(nonatomic, strong) NSDictionary *userProperties;
+
+/////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Public methods
+//
+// These methods are intended for users of the library
+//
+/////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ *  Constructor for an empty object.
+ */
++ (instancetype)object;
+
+/**
+ *  Constructor for an object including JSON.
+ */
++ (instancetype)objectWithJSON:(nullable NSDictionary *)dict;
+
+/**
+ *  Constructor for an object including JSON and providing a resolver to help
+ *  select the correct classes for sub objects within the json.
+ *
+ *  The generated services provide a default resolver (-objectClassResolver)
+ *  that covers the kinds for that service. They also expose the kind mappings
+ *  via the +kindStringToClassMap method.
+ */
++ (instancetype)objectWithJSON:(nullable NSDictionary *)dict
+           objectClassResolver:(id<GTLRObjectClassResolver>)objectClassResolver;
+
+/**
+ *  The JSON for the object, or an empty string if there is no JSON or if the JSON
+ *  dictionary cannot be represented as JSON.
+ */
+- (NSString *)JSONString;
+
+/**
+ *  Generic access for setting entries in the JSON dictionary.  This creates the JSON dictionary
+ *  if necessary.
+ *
+ *  @note: Applications should use @c setAdditionalProperty:forName: when setting
+ *         API object properties that do not have generated @c \@property accessors.
+ */
+- (void)setJSONValue:(nullable id)obj forKey:(nonnull NSString *)key;
+
+/**
+ *  Generic access to the JSON dictionary.
+ *
+ *  @note: Applications should use @c additionalPropertyForName: when accessing
+ *         API object properties that do not have generated @c \@property accessors.
+ */
+- (nullable id)JSONValueForKey:(NSString *)key;
+
+/**
+ *  The list of keys in this object's JSON that are not listed as properties on the object.
+ */
+- (nullable NSArray<NSString *> *)additionalJSONKeys;
+
+/**
+ *  Setter for any key in the JSON that is not listed as a @c \@property in the class declaration.
+ */
+- (void)setAdditionalProperty:(id)obj forName:(NSString *)name;
+
+/**
+ *  Accessor for any key in the JSON that is not listed as a @c \@property in the class
+ *  declaration.
+ */
+- (nullable id)additionalPropertyForName:(NSString *)name;
+
+/**
+ *  A dictionary of all keys in the JSON that are not listed as a @c \@property in the class
+ *  declaration.
+ */
+- (NSDictionary<NSString *, id> *)additionalProperties;
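+
+// Example (editor's sketch; the "channel" key is illustrative of a JSON key
+// with no generated @property accessor):
+//
+//   [object setAdditionalProperty:@"beta" forName:@"channel"];
+//   NSString *channel = [object additionalPropertyForName:@"channel"];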
+
+/**
+ *  A string for a partial query describing the fields present.
+ *
+ *  @note Only the first element of any array is examined.
+ *
+ *  @see https://developers.google.com/google-apps/tasks/performance?csw=1#partial
+ *
+ *  @return A @c fields string describing the fields present in the object.
+ */
+- (NSString *)fieldsDescription;
+
+/**
+ *  An object containing only the changes needed to do a partial update (patch),
+ *  where the patch would be to change an object from the original to the receiver,
+ *  such as
+ *    @c GTLRSomeObject *patchObject = [newVersion patchObjectFromOriginal:oldVersion];
+ *
+ *  @note This method returns nil if there are no changes between the original and the receiver.
+ *
+ *  @see https://developers.google.com/google-apps/tasks/performance?csw=1#patch
+ *
+ *  @param original The original object from which to create the patch object.
+ *
+ *  @return The object used for the patch body.
+ */
+- (nullable id)patchObjectFromOriginal:(GTLRObject *)original;
+
+/**
+ *  A null value to set object properties for patch queries that delete fields.
+ *
+ *  Do not use this except when setting an object property for a patch query.
+ *
+ *  @return The null value object.
+ */
++ (id)nullValue;
+
+#pragma mark Internal
+
+///////////////////////////////////////////////////////////////////////////////
+//
+// Protected methods
+//
+// These methods are intended for subclasses of GTLRObject
+//
+
+// Creation of objects from a JSON dictionary. The class created depends on
+// the content of the JSON, not the class messaged.
++ (nullable GTLRObject *)objectForJSON:(NSMutableDictionary *)json
+                          defaultClass:(nullable Class)defaultClass
+                   objectClassResolver:(id<GTLRObjectClassResolver>)objectClassResolver;
+
+// Property-to-key mapping (for JSON keys which are not used as method names)
++ (nullable NSDictionary<NSString *, NSString *> *)propertyToJSONKeyMap;
+
+// property-to-Class mapping for array properties (to say what is in the array)
++ (nullable NSDictionary<NSString *, Class> *)arrayPropertyToClassMap;
+
+// The default class for additional JSON keys
++ (nullable Class)classForAdditionalProperties;
+
+// Indicates if a "kind" property on this class can be used for the class
+// registry or if it appears to be non standard.
++ (BOOL)isKindValidForClassRegistry;
+
+@end
+
+/**
+ *  Collection results have a property containing an array of @c GTLRObject
+ *
+ *  This provides support for @c NSFastEnumeration and for indexed subscripting to
+ *  access the objects in the array.
+ */
+@interface GTLRCollectionObject : GTLRObject<NSFastEnumeration>
+
+/**
+ *  The property name that holds the collection.
+ *
+ *  @return The key for the property holding the array of @c GTLRObject items.
+ */
++ (NSString *)collectionItemsKey;
+
+// objectAtIndexedSubscript: will throw if the index is out of bounds (like
+// NSArray does).
+- (nullable id)objectAtIndexedSubscript:(NSUInteger)idx;
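+
+// Example (editor's sketch; GTLRSomeCollection and GTLRSomeItem stand in for a
+// generated collection class and its item class):
+//
+//   GTLRSomeCollection *results = ...;
+//   for (GTLRSomeItem *item in results) { /* NSFastEnumeration */ }
+//   GTLRSomeItem *first = results[0];  // indexed subscripting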
+
+@end
+
+/**
+ *  A GTLRDataObject holds media data and the MIME type of the data returned by a media
+ *  download query.
+ *
+ *  The JSON for the object may be nil.
+ */
+@interface GTLRDataObject : GTLRObject
+
+/**
+ *  The downloaded media data.
+ */
+@property(atomic, strong) NSData *data;
+
+/**
+ *  The MIME type of the downloaded media data.
+ */
+@property(atomic, copy) NSString *contentType;
+
+@end
+
+/**
+ *  Base class used when a service method directly returns an array instead
+ *  of a JSON object. This exists for methods that are not up to spec.
+ */
+@interface GTLRResultArray : GTLRCollectionObject
+
+/**
+ *  This method should only be called by subclasses.
+ */
+- (nullable NSArray *)itemsWithItemClass:(Class)itemClass;
+@end
+
+/**
+ *  Helper to call the resolver and find the class to use for the given JSON.
+ *  Intended for internal library use only.
+ */
+Class GTLRObjectResolveClass(
+    id<GTLRObjectClassResolver> objectClassResolver,
+    NSDictionary *json,
+    Class defaultClass);
+
+NS_ASSUME_NONNULL_END

+ 253 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRQuery.h

@@ -0,0 +1,253 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Query documentation:
+// https://github.com/google/google-api-objectivec-client-for-rest/wiki#query-operations
+
+#import "GTLRObject.h"
+#import "GTLRUploadParameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class GTLRServiceTicket;
+@class GTLRServiceExecutionParameters;
+@class GTLRQuery;
+
+/**
+ * This protocol is just to support passing of either a batch or a single query
+ * to a GTLRService instance. The library does not expect or support client app
+ * implementations of this protocol.
+ */
+@protocol GTLRQueryProtocol <NSObject, NSCopying>
+
+/**
+ *  Service ticket values may be set in the execution parameters for an individual query
+ *  prior to executing the query.
+ */
+@property(atomic, strong, null_resettable) GTLRServiceExecutionParameters *executionParameters;
+
+- (BOOL)isBatchQuery;
+- (BOOL)hasExecutionParameters;
+- (BOOL)shouldSkipAuthorization;
+- (void)invalidateQuery;
+- (nullable NSDictionary<NSString *, NSString *> *)additionalHTTPHeaders;
+- (nullable NSDictionary<NSString *, NSString *> *)additionalURLQueryParameters;
+- (nullable NSString *)loggingName;
+- (nullable GTLRUploadParameters *)uploadParameters;
+
+@end
+
+@protocol GTLRQueryCollectionProtocol
+@optional
+@property(nonatomic, strong) NSString *pageToken;
+@end
+
+/**
+ *  A block called when a query completes executing.
+ *
+ *  Errors passed to the completionBlock will have an "underlying" GTLRErrorObject
+ *  when the server returned an error for this specific query:
+ *
+ *    GTLRErrorObject *errorObj = [GTLRErrorObject underlyingObjectForError:callbackError];
+ *    if (errorObj) {
+ *      // The server returned this error for this specific query.
+ *    } else {
+ *      // The query execution fetch failed.
+ *    }
+ *
+ *  @param callbackTicket The ticket that tracked query execution.
+ *  @param object         The result of query execution. This will be derived from
+ *                        GTLRObject.
+ *  @param callbackError  If non-nil, the query execution failed.
+ */
+typedef void (^GTLRQueryCompletionBlock)(GTLRServiceTicket *callbackTicket,
+                                         id _Nullable object,
+                                         NSError * _Nullable callbackError);
+
+/**
+ *  Class for a single query.
+ */
+@interface GTLRQuery : NSObject <GTLRQueryProtocol, NSCopying>
+
+/**
+ *  The object to be uploaded with the query. The JSON of this object becomes
+ *  the body for PUT and POST requests.
+ */
+@property(atomic, strong, nullable) GTLRObject *bodyObject;
+
+/**
+ *  Each query must have a request ID string. The client app may replace the
+ *  default assigned request ID with a custom string, provided that, when
+ *  used in a batch query, all request IDs in the batch are unique.
+ */
+@property(atomic, copy) NSString *requestID;
+
+/**
+ *  For queries which support file upload, the MIME type and file URL
+ *  or data must be provided.
+ */
+@property(atomic, copy, nullable) GTLRUploadParameters *uploadParameters;
+
+/**
+ *  Any additional URL query parameters for this query.
+ *
+ *  These query parameters override the same keys from the service object's
+ *  additionalURLQueryParameters
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalURLQueryParameters;
+
+/**
+ *  Any additional HTTP headers for this query.
+ *
+ *  These headers override the same keys from the service object's additionalHTTPHeaders
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalHTTPHeaders;
+
+/**
+ *  If set, when the query is executed, an @c "alt" query parameter is added
+ *  with this value and the raw result of the query is returned in a
+ *  GTLRDataObject. This is useful when the server documents result datatypes
+ *  other than JSON ("csv", for example).
+ */
+@property(atomic, copy) NSString *downloadAsDataObjectType;
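+
+// A minimal raw-download sketch (the "csv" value is illustrative; the type
+// strings actually supported depend on the API being called):
+//
+//   query.downloadAsDataObjectType = @"csv";  // executed as "?alt=csv"
+//   // The completion handler's object is then a GTLRDataObject whose data
+//   // and contentType carry the raw server response.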
+
+/**
+ * If set, and the query also has a non-empty @c downloadAsDataObjectType, the
+ * URL to download from will be modified to include "download/". This extra path
+ * component avoids the need for a server redirect to the download URL.
+ */
+@property(atomic, assign) BOOL useMediaDownloadService;
+
+/**
+ *  Clients may set this to YES to disallow authorization. Defaults to NO.
+ */
+@property(atomic, assign) BOOL shouldSkipAuthorization;
+
+/**
+ *  An optional callback block to be called immediately before the executeQuery: completion handler.
+ *
+ *  The completionBlock property is particularly useful for queries executed in a batch.
+ */
+@property(atomic, copy, nullable) GTLRQueryCompletionBlock completionBlock;
+
+/**
+ *  The brief string to identify this query in GTMSessionFetcher HTTP logs.
+ *
+ *  A default logging name is set by the code generator, but may be overridden by the client app.
+ */
+@property(atomic, copy, nullable) NSString *loggingName;
+
+#pragma mark Internal
+/////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Properties below are used by the library and aren't typically needed by client apps.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ *  The URITemplate path segment. This is initialized by the service generator.
+ */
+@property(atomic, readonly) NSString *pathURITemplate;
+
+/**
+ *  The HTTP method to use for this query. This is initialized by the service generator.
+ */
+@property(atomic, readonly, nullable) NSString *httpMethod;
+
+/**
+ *  The parameter names that are in the URI template.
+ *  This is initialized by the service generator.
+ *
+ *  The service generator collects these via the discovery info instead of having to parse the
+ *  template to figure out what is part of the path.
+ */
+@property(atomic, readonly, nullable) NSArray<NSString *> *pathParameterNames;
+
+/**
+ *  The JSON dictionary of all the parameters set on this query.
+ *
+ *  The JSON values are set by setting the query's properties.
+ */
+@property(nonatomic, strong, nullable) NSMutableDictionary<NSString *, id> *JSON;
+
+/**
+ *  A custom URI template for resumable uploads.  This is initialized by the service generator
+ *  if needed.
+ */
+@property(atomic, copy, nullable) NSString *resumableUploadPathURITemplateOverride;
+
+/**
+ *  A custom URI template for simple and multipart media uploads.  This is initialized
+ *  by the service generator.
+ */
+@property(atomic, copy, nullable) NSString *simpleUploadPathURITemplateOverride;
+
+/**
+ *  The GTLRObject subclass expected for results.  This is initialized by the service generator.
+ *
+ *  This is needed if the object returned by the server lacks a known "kind" string.
+ */
+@property(atomic, assign, nullable) Class expectedObjectClass;
+
+/**
+ *  Set when the query has been invalidated, meaning it was slated for execution so it's been copied
+ *  and its callbacks were released, or it's a copy that has finished executing.
+ *
+ *  Once a query has been invalidated, it cannot be executed, added to a batch, or copied.
+ */
+@property(atomic, assign, getter=isQueryInvalid) BOOL queryInvalid;
+
+/**
+ *  Internal query init method.
+ *
+ *  @param pathURITemplate    URI template to be filled in with parameters.
+ *  @param httpMethod         The request's HTTP method. A nil method will execute as GET.
+ *  @param pathParameterNames Names of parameters to be replaced in the template.
+ */
+- (instancetype)initWithPathURITemplate:(NSString *)pathURITemplate
+                             HTTPMethod:(nullable NSString *)httpMethod
+                     pathParameterNames:(nullable NSArray<NSString *> *)pathParameterNames NS_DESIGNATED_INITIALIZER;
+
+/**
+ *  @return Auto-generated request ID string.
+ */
++ (NSString *)nextRequestID;
+
+/**
+ *  Overridden by subclasses.
+ *
+ *  @return Substitute parameter names where needed for Objective-C or library compatibility.
+ */
++ (nullable NSDictionary<NSString *, NSString *> *)parameterNameMap;
+
+/**
+ *  Overridden by subclasses.
+ *
+ *  @return Map of property keys to the class of objects to be instantiated in arrays.
+ */
++ (nullable NSDictionary<NSString *, Class> *)arrayPropertyToClassMap;
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+/**
+ *  The library doesn't use GTLRQueryCollectionImpl, but it provides a concrete implementation
+ *  of the protocol so the methods do not cause private method errors in Xcode/AppStore review.
+ */
+@interface GTLRQueryCollectionImpl : GTLRQuery <GTLRQueryCollectionProtocol>
+@end
+
+NS_ASSUME_NONNULL_END

+ 73 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRRuntimeCommon.h

@@ -0,0 +1,73 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "GTLRDefines.h"
+
+@protocol GTLRObjectClassResolver;
+
+NS_ASSUME_NONNULL_BEGIN
+
+// This protocol and support class are an internal implementation detail so
+// GTLRObject and GTLRQuery can share some code.
+
+/**
+ *  An internal protocol for the GTLR library.
+ *
+ *  None of these methods should be used by client apps.
+ */
+@protocol GTLRRuntimeCommon <NSObject>
+@required
+// Get/Set properties
+- (void)setJSONValue:(nullable id)obj forKey:(NSString *)key;
+- (id)JSONValueForKey:(NSString *)key;
+// Child cache
+- (void)setCacheChild:(nullable id)obj forKey:(NSString *)key;
+- (nullable id)cacheChildForKey:(NSString *)key;
+// Object mapper.
+- (nullable id<GTLRObjectClassResolver>)objectClassResolver;
+// Key map
++ (nullable NSDictionary<NSString *, NSString *> *)propertyToJSONKeyMapForClass:(Class<GTLRRuntimeCommon>)aClass;
+// Array item types
++ (nullable NSDictionary<NSString *, Class> *)arrayPropertyToClassMapForClass:(Class<GTLRRuntimeCommon>)aClass;
+// The parent class for dynamic support
++ (nullable Class<GTLRRuntimeCommon>)ancestorClass;
+@end
+
+/**
+ *  An internal class for the GTLR library.
+ *
+ *  None of these methods should be used by client apps.
+ */
+@interface GTLRRuntimeCommon : NSObject
+// Wire things up.
++ (BOOL)resolveInstanceMethod:(SEL)sel onClass:(Class)onClass;
+// Helpers
++ (nullable id)objectFromJSON:(id)json
+                 defaultClass:(nullable Class)defaultClass
+          objectClassResolver:(id<GTLRObjectClassResolver>)objectClassResolver
+                  isCacheable:(nullable BOOL *)isCacheable;
++ (nullable id)jsonFromAPIObject:(id)obj
+                   expectedClass:(nullable Class)expectedClass
+                     isCacheable:(nullable BOOL *)isCacheable;
+// Walk up the class tree merging dictionaries and return the result.
++ (NSDictionary *)mergedClassDictionaryForSelector:(SEL)selector
+                                        startClass:(Class)startClass
+                                     ancestorClass:(Class)ancestorClass
+                                             cache:(NSMutableDictionary *)cache;
+@end
+
+NS_ASSUME_NONNULL_END

+ 879 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRService.h

@@ -0,0 +1,879 @@
+/* Copyright (c) 2016 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Service object documentation:
+// https://github.com/google/google-api-objectivec-client-for-rest/wiki#services-and-tickets
+
+#import <Foundation/Foundation.h>
+
+#import "GTLRDefines.h"
+#import "GTLRBatchQuery.h"
+#import "GTLRBatchResult.h"
+#import "GTLRDateTime.h"
+#import "GTLRDuration.h"
+#import "GTLRErrorObject.h"
+#import "GTLRObject.h"
+#import "GTLRQuery.h"
+
+@class GTMSessionFetcher;
+@class GTMSessionFetcherService;
+@protocol GTMFetcherAuthorizationProtocol;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  The domain used for NSErrors created by GTLRService query execution.
+ */
+extern NSString *const kGTLRServiceErrorDomain;
+
+typedef NS_ENUM(NSInteger, GTLRServiceError) {
+  GTLRServiceErrorQueryResultMissing      = -3000,
+  GTLRServiceErrorBatchResponseUnexpected = -3001,
+  GTLRServiceErrorBatchResponseStatusCode = -3002
+};
+
+/**
+ *  The kGTLRServiceErrorDomain userInfo key for the server response body.
+ */
+extern NSString *const kGTLRServiceErrorBodyDataKey;
+
+/**
+ *  The kGTLRServiceErrorDomain userInfo key for the response content ID, if appropriate.
+ */
+extern NSString *const kGTLRServiceErrorContentIDKey;
+
+/**
+ *  The domain used for foundation errors created from GTLRErrorObjects that
+ *  were not originally foundation errors.
+ */
+extern NSString *const kGTLRErrorObjectDomain;
+
+/**
+ *  The userInfo key for a GTLRErrorObject for errors with domain kGTLRErrorObjectDomain
+ *  when the error was created from a structured JSON error response body.
+ */
+extern NSString *const kGTLRStructuredErrorKey;
+
+/**
+ *  A constant ETag for when updating or deleting a single entry, telling
+ *  the server to replace the current value unconditionally.
+ *
+ *  Do not use this in entries in a batch feed.
+ */
+extern NSString *const kGTLRETagWildcard;
+
+/**
+ *  Notification of a ticket starting.  The notification object is the ticket.
+ *  This is posted on the main thread.
+ *
+ *  Use the stopped notification to log all requests made by the library.
+ */
+extern NSString *const kGTLRServiceTicketStartedNotification;
+
+/**
+ *  Notification of a ticket stopping.  The notification object is the ticket.
+ *  This is posted on the main thread.
+ */
+extern NSString *const kGTLRServiceTicketStoppedNotification;
+
+/**
+ *  Notification when parsing of a server response or entry begins.
+ *  This is posted on the main thread.
+ */
+extern NSString *const kGTLRServiceTicketParsingStartedNotification;
+
+/**
+ *  Notification when parsing of a server response or entry ends.
+ *  This is posted on the main thread.
+ */
+extern NSString *const kGTLRServiceTicketParsingStoppedNotification;
+
+/**
+ *  The header name used to send an Application's Bundle Identifier.
+ *  For more information on adding API restrictions see the docs:
+ *    https://cloud.google.com/docs/authentication/api-keys#api_key_restrictions
+ */
+extern NSString *const kXIosBundleIdHeader;
+
+@class GTLRServiceTicket;
+
+/**
+ *  Callback block for query execution.
+ *
+ *  @param callbackTicket The ticket that tracked query execution.
+ *  @param object         The result of query execution. This will be derived from
+ *                        GTLRObject. The object may be nil for operations such as DELETE which
+ *                        do not return an object.  The object will be a GTLRBatchResult for
+ *                        batch operations, and GTLRDataObject for media downloads.
+ *  @param callbackError  If non-nil, the query execution failed.  For batch requests,
+ *                        this may be nil even if individual queries in the batch have failed.
+ */
+typedef void (^GTLRServiceCompletionHandler)(GTLRServiceTicket *callbackTicket,
+                                             id _Nullable object,
+                                             NSError * _Nullable callbackError);
+
+/**
+ *  Callback block for upload of query data.
+ *
+ *  @param progressTicket             The ticket that tracks query execution.
+ *  @param totalBytesUploaded         Number of bytes uploaded so far.
+ *  @param totalBytesExpectedToUpload Number of bytes expected to be uploaded.
+ */
+typedef void (^GTLRServiceUploadProgressBlock)(GTLRServiceTicket *progressTicket,
+                                               unsigned long long totalBytesUploaded,
+                                               unsigned long long totalBytesExpectedToUpload);
+
+/**
+ *  Callback block invoked when an error occurs during query execution.
+ *
+ *  @param retryTicket        The ticket that tracks query execution.
+ *  @param suggestedWillRetry Flag indicating if the library would retry this without a retry block.
+ *  @param fetchError         The error that occurred. If the domain is
+ *                            kGTMSessionFetcherStatusDomain then the error's code is the server
+ *                            response status.  Details on the error from the server are available
+ *                            in the userInfo via the keys kGTLRStructuredErrorKey and
+ *                            NSLocalizedDescriptionKey.
+ *
+ *  @return YES if the request should be retried.
+ */
+typedef BOOL (^GTLRServiceRetryBlock)(GTLRServiceTicket *retryTicket,
+                                      BOOL suggestedWillRetry,
+                                      NSError * _Nullable fetchError);
+
+/**
+ *  Block to be invoked by a test block.
+ *
+ *  @param object The faked object, if any, to be passed to the test code's completion handler.
+ *  @param error  The faked error if any, to be passed to the test code's completion handler.
+ */
+typedef void (^GTLRServiceTestResponse)(id _Nullable object, NSError *_Nullable error);
+
+/**
+ *  A test block enables testing of query execution without any network activity.
+ *
+ *  The test block must finish by calling the response block, passing either an object
+ *  (GTLRObject or GTLRBatchResult) or an NSError.
+ *
+ *  The query is available to the test block code as testTicket.originalQuery.
+ *
+ *  Because query execution is asynchronous, the test code must wait for a callback,
+ *  either with GTLRService's waitForTicket:timeout:fetchedObject:error: or with
+ *  XCTestCase's waitForExpectationsWithTimeout:
+ *
+ *  Example usage is available in GTLRServiceTest.
+ *
+ *  @param testTicket   The ticket that tracks query execution.
+ *  @param testResponse A block that must be invoked by the test block. This may be invoked
+ *                      synchronously or asynchronously.
+ */
+typedef void (^GTLRServiceTestBlock)(GTLRServiceTicket *testTicket,
+                                     GTLRServiceTestResponse testResponse);
+
+#pragma mark -
+
+/**
+ *  Base class for the service that executes queries and manages tickets.
+ *
+ *  Client apps will typically use a generated subclass of GTLRService.
+ */
+@interface GTLRService : NSObject
+
+#pragma mark Query Execution
+
+/**
+ *  Executes the supplied query
+ *
+ *  Success is indicated in the completion handler by a nil error parameter, not by a non-nil
+ *  object parameter.
+ *
+ *  The callback block is invoked exactly once unless the ticket is cancelled.
+ *  The callback will be called on the service's callback queue.
+ *
+ *  Various execution parameters will be taken from the service's properties, unless overridden
+ *  in the query's @c executionParameters property.
+ *
+ *  A query may only be executed a single time. To reuse a query, make a copy before executing
+ *  it.
+ *
+ *  To get a NSURLRequest that represents the query, use @c -[GTLRService requestForQuery:]
+ *
+ *  @param query   The API query, either a subclass of GTLRQuery, or a GTLRBatchQuery.
+ *  @param handler The execution callback block.
+ *
+ *  @return A ticket for tracking or canceling query execution.
+ */
+- (GTLRServiceTicket *)executeQuery:(id<GTLRQueryProtocol>)query
+                  completionHandler:(nullable GTLRServiceCompletionHandler)handler;
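+
+// A minimal execution sketch (GTLRSomeService and
+// GTLRSomeServiceQuery_ItemsList are hypothetical generated classes):
+//
+//   GTLRSomeService *service = [[GTLRSomeService alloc] init];
+//   GTLRSomeServiceQuery_ItemsList *query = [GTLRSomeServiceQuery_ItemsList query];
+//   [service executeQuery:query
+//       completionHandler:^(GTLRServiceTicket *ticket, id object, NSError *error) {
+//     if (error == nil) {
+//       // Success: a nil error, even if object is also nil (e.g. DELETE).
+//     }
+//   }];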
+
+/**
+ *  Executes the supplied query
+ *
+ *  The callback is invoked exactly once unless the ticket is cancelled.
+ *  The callback will be called on the service's callbackQueue.
+ *  Various execution parameters will be taken from the service's properties, unless overridden
+ *  in the query's @c executionParameters property.
+ *
+ *  The selector should have a signature matching:
+ *  @code
+ *  - (void)serviceTicket:(GTLRServiceTicket *)callbackTicket
+ *     finishedWithObject:(GTLRObject *)object
+ *                  error:(NSError *)callbackError
+ *  @endcode
+ *
+ *  @param query            The API query, either a subclass of GTLRQuery, or a GTLRBatchQuery.
+ *  @param delegate         The object to be messaged with the selector upon completion.
+ *  @param finishedSelector The selector to be invoked upon completion.
+ *
+ *  @return A ticket for tracking or canceling query execution.
+ */
+- (GTLRServiceTicket *)executeQuery:(id<GTLRQueryProtocol>)query
+                           delegate:(nullable id)delegate
+                  didFinishSelector:(nullable SEL)finishedSelector;
+
+
+/**
+ *  Enable automatic pagination.
+ *
+ *  A ticket can optionally do a sequence of fetches for queries where repeated requests
+ *  with a @c nextPageToken query parameter are required to retrieve all pages of
+ *  the response collection.  The client's callback is invoked only when all items have
+ *  been retrieved, or an error has occurred.
+ *
+ *  The final object may be a combination of multiple page responses
+ *  so it may not be the same as if all results had been returned in a single
+ *  page. Some fields of the response may reflect only the final page's values.
+ *
+ *  Automatic page fetches will return an error if more than 25 page fetches are
+ *  required.  For debug builds, this will log a warning to the console when more
+ *  than 2 page fetches occur, as a reminder that the query's @c maxResults parameter
+ *  should probably be increased to specify more items returned per page.
+ *
+ *  Automatic page accumulation is available for query result objects that are derived
+ *  from GTLRCollectionObject.
+ *
+ *  This may also be specified for a single query in the query's @c executionParameters property.
+ *
+ *  Default value is NO.
+ */
+@property(nonatomic, assign) BOOL shouldFetchNextPages;
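+
+// A minimal pagination sketch: with this flag set, a list query whose results
+// span several pages invokes its callback once, with items accumulated:
+//
+//   service.shouldFetchNextPages = YES;
+//   // The callback's object then merges the items of every page; other
+//   // fields may reflect only the final page.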
+
+/**
+ *  Some services require a developer key for quotas and limits.
+ *
+ *  If you have enabled the iOS API Key Restriction, you will want
+ *  to manually set the @c APIKeyRestrictionBundleID property, or
+ *  use -setMainBundleIDRestrictionWithAPIKey: to set your API key
+ *  and set the restriction to the main bundle's bundle id.
+ */
+@property(nonatomic, copy, nullable) NSString *APIKey;
+
+/**
+ *  The Bundle Identifier to use for the API key restriction. This will be
+ *  sent in an X-Ios-Bundle-Identifier header; for more information see
+ *  the API key documentation
+ *    https://cloud.google.com/docs/authentication/api-keys#api_key_restrictions
+ */
+@property(nonatomic, copy, nullable) NSString *APIKeyRestrictionBundleID;
+
+/**
+ *  Helper method to set the @c APIKey to the given value and set the
+ *  @c APIKeyRestrictionBundleID to the main bundle's bundle identifier.
+ */
+- (void)setMainBundleIDRestrictionWithAPIKey:(NSString *)apiKey;
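+
+// A minimal API key sketch (the key string is a placeholder):
+//
+//   service.APIKey = @"YOUR_API_KEY";
+//   service.APIKeyRestrictionBundleID = [[NSBundle mainBundle] bundleIdentifier];
+//   // Equivalently:
+//   [service setMainBundleIDRestrictionWithAPIKey:@"YOUR_API_KEY"];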
+
+/**
+ *  An authorizer adds user authentication headers to the request as needed.
+ *
+ *  This may be overridden on individual queries with the @c shouldSkipAuthorization property.
+ */
+@property(nonatomic, retain, nullable) id <GTMFetcherAuthorizationProtocol> authorizer;
+
+/**
+ *  Enable fetcher retry support.  See the explanation of retry support in @c GTMSessionFetcher.h
+ *
+ *  Default value is NO, but retry is also enabled if the retryBlock is not nil.
+ *
+ *  This may also be specified for a single query in the query's @c executionParameters property.
+ */
+@property(nonatomic, assign, getter=isRetryEnabled) BOOL retryEnabled;
+
+/**
+ *  A retry block may be provided to inspect and change retry criteria.
+ *
+ *  This may also be specified for a single query in the query's @c executionParameters property.
+ */
+@property(atomic, copy, nullable) GTLRServiceRetryBlock retryBlock;
+
+/**
+ *  The maximum retry interval. Retries occur at increasing intervals, up to the specified maximum.
+ *
+ *  This may also be specified for a single query in the query's @c executionParameters property.
+ */
+@property(nonatomic, assign) NSTimeInterval maxRetryInterval;
+
+#pragma mark Fetch Object by Resource URL
+
+/**
+ *  Fetch an object given the resource URL. This is appropriate when the object's
+ *  full link is known, such as from a selfLink response property.
+ *
+ *  @param resourceURL         The URL of the object to be fetched.
+ *  @param objectClass         The GTLRObject subclass to be instantiated. If nil, the library
+ *                             will try to infer the class from the object's "kind" string property.
+ *  @param executionParameters Values to override the service's properties when executing the
+ *                             ticket.
+ *  @param handler             The execution callback block.
+ *
+ *  @return A ticket for tracking or canceling query execution.
+ */
+- (GTLRServiceTicket *)fetchObjectWithURL:(NSURL *)resourceURL
+                              objectClass:(nullable Class)objectClass
+                      executionParameters:(nullable GTLRServiceExecutionParameters *)executionParameters
+                        completionHandler:(nullable GTLRServiceCompletionHandler)handler;
+
+#pragma mark Support for Client Tests
+
+/**
+ *  A test block can be provided to test service calls without any network activity.
+ *
+ *  See the description of @c GTLRServiceTestBlock for additional details.
+ *
+ *  This may also be specified for a single query in the query's @c executionParameters property.
+ *
+ *  A service instance for testing can also be created with @c +mockServiceWithFakedObject
+ */
+@property(nonatomic, copy, nullable) GTLRServiceTestBlock testBlock;
+
+#pragma mark Converting a Query to an NSURLRequest
+
+/**
+ *  Creates a NSURLRequest from the query object and from properties on this service
+ *  (additionalHTTPHeaders, additionalURLQueryParameters, APIKey) without executing
+ *  it. This can be useful for using @c GTMSessionFetcher or @c NSURLSession to
+ *  perform the fetch.
+ *
+ *  For requests to non-public resources, the request will not yet be authorized;
+ *  that can be done using the GTLR service's authorizer. Creating a @c GTMSessionFetcher
+ *  from the GTLRService's @c fetcherService will take care of authorization as well.
+ *
+ *  This works only for GET queries, and only for an individual query, not a batch query.
+ *
+ *  @note Unlike @c executeQuery:, @c requestForQuery: does not release the query's callback blocks.
+ *
+ *  @param query The query used to create the request.
+ *
+ *  @return A request suitable for use with @c GTMSessionFetcher or @c NSURLSession
+ */
+- (NSMutableURLRequest *)requestForQuery:(GTLRQuery *)query;
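+
+// A minimal sketch of performing the fetch manually with NSURLSession,
+// assuming a GET query instance named query:
+//
+//   NSMutableURLRequest *request = [service requestForQuery:query];
+//   NSURLSessionDataTask *task =
+//       [[NSURLSession sharedSession] dataTaskWithRequest:request
+//                                       completionHandler:^(NSData *data,
+//                                                           NSURLResponse *response,
+//                                                           NSError *error) {
+//     // The response JSON must be parsed by the caller; authorization, if
+//     // needed, must also be applied separately.
+//   }];
+//   [task resume];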
+
+#pragma mark User Properties
+
+/**
+ *  The service properties dictionary is copied to become the initial property dictionary
+ *  for each ticket, augmented by a query's execution parameter's properties.
+ */
+@property(nonatomic, copy, nullable) NSDictionary<NSString *, id> *serviceProperties;
+
+#pragma mark JSON to GTLRObject Mapping
+
+/**
+ * Specifies subclasses to be created instead of standard library objects, allowing
+ * an app to add properties and methods to GTLR objects.
+ *
+ * This is just a helper method that sets the service's @c objectClassResolver property.
+ *
+ * Example:
+ * @code
+ *  NSDictionary *surrogates = @{
+ *    [MyDriveFile class]     : [GTLRDrive_File_Surrogate class],
+ *    [MyDriveFileList class] : [GTLRDrive_FileList_Surrogate class]
+ *  };
+ *  [service setSurrogates:surrogates];
+ * @endcode
+ */
+- (void)setSurrogates:(NSDictionary <Class, Class>*)surrogates;
+
+/**
+ *  Used to decide what GTLRObject subclass to make from the received JSON.
+ *
+ *  This defaults to a resolver that will use any kindStringToClassMap the service
+ *  provides.
+ *
+ *  To use a standard resolver with a surrogates dictionary, invoke setSurrogates: instead
+ *  of setting this property.
+ */
+@property(nonatomic, strong) id<GTLRObjectClassResolver> objectClassResolver;
+
+/**
+ *  A dictionary mapping "kind" strings to the GTLRObject subclasses that should
+ *  be created for JSON with the given kind.
+ */
++ (NSDictionary<NSString *, Class> *)kindStringToClassMap;
+
+#pragma mark Request Settings
+
+/**
+ *  The queue used to invoke callbacks. By default, the main queue is used for callbacks.
+ */
+@property(nonatomic, retain) dispatch_queue_t callbackQueue;
+
+/**
+ *  Allows the application to make non-SSL and localhost requests for testing.
+ *
+ *  Default value is NO.
+ */
+@property(nonatomic, assign) BOOL allowInsecureQueries;
+
+/**
+ *  The fetcher service creates the fetcher instances for this API service.
+ *
+ *  Applications may set this to an authorized fetcher service created elsewhere
+ *  in the app, or may take the fetcher service created by this GTLRService and use it
+ *  to create fetchers independent of this service.
+ */
+@property(nonatomic, retain) GTMSessionFetcherService *fetcherService;
+
+#pragma mark Custom User Agents
+
+/**
+ *  Applications needing an additional identifier in the server logs may specify one
+ *  through this property and it will be added to the existing UserAgent. It should
+ *  already be a valid identifier as no cleaning/validation is done.
+ */
+@property(nonatomic, copy, nullable) NSString *userAgentAddition;
+
+/**
+ *  A base user-agent based on the application signature in the Info.plist settings.
+ *
+ *  Most applications should not explicitly set this property.  Any string provided will
+ *  be cleaned of inappropriate characters.
+ */
+@property(nonatomic, copy, nullable) NSString *userAgent;
+
+/**
+ * The request user agent includes the library and OS version appended to the
+ * base userAgent, along with the optional addition string.
+ */
+@property(nonatomic, readonly, nullable) NSString *requestUserAgent;
+
+/**
+ *  A precise base userAgent string identifying the application.  No cleaning of characters
+ *  is done. Library-specific details will be appended.
+ *
+ *  @param userAgent A wire-ready user agent string.
+ */
+- (void)setExactUserAgent:(nullable NSString *)userAgent;
+
+/**
+ *  A precise userAgent string to send on requests; no cleaning is done. When
+ *  set, requestUserAgent will be exactly this, no library or system information
+ *  will be auto added.
+ *
+ *  @param requestUserAgent A wire-ready user agent string.
+ */
+- (void)overrideRequestUserAgent:(nullable NSString *)requestUserAgent;
+
+/**
+ *  Any additional URL query parameters for the queries executed by this service.
+ *
+ *  Individual queries may have additionalURLQueryParameters specified as well.
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalURLQueryParameters;
+
+/**
+ *  Any additional HTTP headers for the queries executed by this service.
+ *
+ *  Individual queries may have additionalHTTPHeaders specified as well.
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, NSString *> *additionalHTTPHeaders;
+
+#pragma mark Request URL Construction
+
+/*
+ * The URL for where to send a Query is built out of these parts
+ * ( https://developers.google.com/discovery/v1/using#build-compose ) :
+ *
+ *   service.rootURLString + service.servicePath + query.pathURITemplate
+ *
+ * Note: odds are these both should end in a '/', so make sure any value you
+ * provide will combine correctly with the above rules.
+ */
+
+/**
+ *  The scheme and host for the API server.  This may be modified to point at a test server.
+ */
+@property(nonatomic, copy) NSString *rootURLString;
+
+/**
+ *  The path for the specific API service instance, relative to the rootURLString.
+ */
+@property(nonatomic, copy) NSString *servicePath;
+
+/**
+ *  A path fragment added in to URLs before "servicePath" to build
+ *  the full URL used for resumable media uploads.
+ */
+@property(nonatomic, copy) NSString *resumableUploadPath;
+
+/**
+ *  A path fragment added in to URLs before "servicePath" to build
+ *  the full URL used for simple and multipart media uploads.
+ */
+@property(nonatomic, copy) NSString *simpleUploadPath;
+
+/**
+ *  A path fragment added in to URLs before "servicePath" to build
+ *  the full URL used for batch requests.
+ */
+@property(nonatomic, copy) NSString *batchPath;
+
+#pragma mark Resumable Uploads
+
+/**
+ *  A block called to track upload progress.
+ *
+ *  A query's service execution parameters may be used to override this.
+ */
+@property(nonatomic, copy, nullable) GTLRServiceUploadProgressBlock uploadProgressBlock;
+
+/**
+ *  The default chunk size for resumable uploads.  This defaults to kGTLRStandardUploadChunkSize
+ *  for service subclasses that support chunked uploads.
+ */
+@property(nonatomic, assign) NSUInteger serviceUploadChunkSize;
+
+/**
+ *  Service subclasses may override this to specify their own default chunk size for
+ *  resumable uploads.
+ */
++ (NSUInteger)defaultServiceUploadChunkSize;
+
+#pragma mark Internal
+/////////////////////////////////////////////////////////////////////////////////////////////
+//
+// Properties below are used by the library and should not typically be set by client apps.
+//
+/////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ *  The queue used for parsing JSON responses.
+ *
+ *  Applications should typically not change this.
+ */
+@property(nonatomic, retain) dispatch_queue_t parseQueue;
+
+/**
+ *  If this service supports pretty printing the JSON on the wire, these are
+ *  the names of the query params that enable it. If there are any values,
+ *  the library disables pretty printing to save on bandwidth.
+ *
+ *  Applications should typically not need to change this; the ServiceGenerator
+ *  will set this up when generating the custom subclass.
+ */
+@property(nonatomic, strong, nullable) NSArray<NSString *> *prettyPrintQueryParameterNames;
+
+/**
+ *  This indicates if the API requires a "data" JSON element to wrap the payload
+ *  on requests and responses.
+ *
+ *  Applications should typically not change this.
+ */
+@property(nonatomic, assign, getter=isDataWrapperRequired) BOOL dataWrapperRequired;
+
+@end
+
+@interface GTLRService (TestingSupport)
+
+/**
+ *  Convenience method to create a mock GTLR service just for testing.
+ *
+ *  Queries executed by this mock service will not perform any network operation,
+ *  but will invoke callbacks and provide the supplied object or error to the
+ *  completion handler.
+ *
+ *  You can make more customized mocks by setting the test block property of a service
+ *  or a query's execution parameters.  The test block can inspect the query as ticket.originalQuery
+ *  to customize test behavior.
+ *
+ *  See the description of @c GTLRServiceTestBlock for more details on customized testing.
+ *
+ *  Example usage is in the unit test method @c testService_MockService_Succeeding
+ *
+ *  @param object  An object derived from GTLRObject to be passed to query completion handlers.
+ *  @param error   An error to be passed to query completion handlers.
+ *
+ *  @return A mock instance of the service, suitable for unit testing.
+ */
++ (instancetype)mockServiceWithFakedObject:(nullable id)object
+                                fakedError:(nullable NSError *)error;
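+
+// A minimal unit-test sketch (fakedObject stands in for whatever result the
+// test expects):
+//
+//   GTLRObject *fakedObject = [GTLRObject object];
+//   GTLRService *service =
+//       [GTLRService mockServiceWithFakedObject:fakedObject fakedError:nil];
+//   // Queries executed on this service skip the network and invoke their
+//   // callbacks with fakedObject.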
+
+/**
+ *  Wait synchronously for fetch to complete (strongly discouraged)
+ *
+ *  This method is intended for use only in unit tests and command-line tools.
+ *  Unit tests may also use XCTest's waitForExpectationsWithTimeout: instead of
+ *  or after this method.
+ *
+ *  This method just runs the current event loop until the fetch completes
+ *  or the timeout limit is reached.  This may discard unexpected events
+ *  that occur while spinning, so it's really not appropriate for use
+ *  in serious applications.
+ *
+ *  Returns YES if the ticket completed or was cancelled.  If the wait
+ *  timed out, returns NO.
+ *
+ *  @param ticket           The ticket being executed.
+ *  @param timeoutInSeconds Maximum duration to wait.
+ *
+ *  @return YES if the ticket completed or was cancelled; NO if the wait timed out.
+ */
+- (BOOL)waitForTicket:(GTLRServiceTicket *)ticket
+              timeout:(NSTimeInterval)timeoutInSeconds;
+
+@end
+
+#pragma mark -
+
+/**
+ *  Service execution parameters may be set on an individual query
+ *  to alter the service's settings.
+ */
+@interface GTLRServiceExecutionParameters : NSObject<NSCopying>
+
+/**
+ *  Override the service's property @c shouldFetchNextPages for automatic pagination.
+ *
+ *  A BOOL value should be specified.
+ */
+@property(atomic, strong, nullable) NSNumber *shouldFetchNextPages;
+
+/**
+ *  Override the service's property @c retryEnabled for enabling automatic retries.
+ *
+ *  A BOOL value should be specified.
+ *
+ *  Retry is also enabled if the retryBlock is not nil.
+ */
+@property(atomic, strong, nullable, getter=isRetryEnabled) NSNumber *retryEnabled;
+
+/**
+ *  Override the service's property @c retryBlock for customizing automatic retries.
+ */
+@property(atomic, copy, nullable) GTLRServiceRetryBlock retryBlock;
+
+/**
+ *  Override the service's property @c maxRetryInterval for customizing automatic retries.
+ *
+ *  A NSTimeInterval (double) value should be specified.
+ */
+@property(atomic, strong, nullable) NSNumber *maxRetryInterval;
+
+/**
+ *  Override the service's property @c uploadProgressBlock for monitoring upload progress.
+ */
+@property(atomic, copy, nullable) GTLRServiceUploadProgressBlock uploadProgressBlock;
+
+/**
+ *  Override the service's property @c callbackQueue for invoking callbacks.
+ */
+@property(atomic, retain, nullable) dispatch_queue_t callbackQueue;
+
+/**
+ *  Override the service's property @c testBlock for simulating query execution.
+ *
+ *  See the description of @c GTLRServiceTestBlock for additional details.
+ */
+@property(atomic, copy, nullable) GTLRServiceTestBlock testBlock;
+
+/**
+ *  Override the service's property @c objectClassResolver for controlling object class selection.
+ */
+@property(atomic, strong, nullable) id<GTLRObjectClassResolver> objectClassResolver;
+
+/**
+ *  The ticket's properties are the service properties, with the execution parameters'
+ *  ticketProperties added (replacing any keys already present from the service).
+ */
+@property(atomic, copy, nullable) NSDictionary<NSString *, id> *ticketProperties;
+
+/**
+ *  Indicates if any of the execution parameters properties are set.
+ */
+@property(nonatomic, readonly) BOOL hasParameters;
+
+@end
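+
+// A minimal per-query override sketch, assuming a query instance named query:
+//
+//   query.executionParameters.shouldFetchNextPages = @YES;
+//   query.executionParameters.retryEnabled = @YES;
+//   // These overrides apply to this query only; the service's own
+//   // properties are left unchanged.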
+
+/**
+ *  A ticket tracks the progress of a query being executed.
+ */
+@interface GTLRServiceTicket : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/**
+ *  The service that issued this ticket.
+ *
+ *  This method may be invoked from any thread.
+ */
+@property(atomic, readonly) GTLRService *service;
+
+#pragma mark Execution Control
+
+/**
+ *  Invoking cancelTicket stops the fetch if it is in progress.  The query callbacks
+ *  will not be invoked.
+ *
+ *  This method may be invoked from any thread.
+ */
+- (void)cancelTicket;
+
+/**
+ *  The time the ticket was created.
+ */
+@property(atomic, readonly) NSDate *creationDate;
+
+/**
+ *  Pause the ticket execution. This is valid only for chunked, resumable upload queries.
+ */
+- (void)pauseUpload;
+
+/**
+ *  Resume the ticket execution. This is valid only for chunked, resumable upload queries.
+ */
+- (void)resumeUpload;
+
+/**
+ *  Checks if the ticket execution is paused.
+ */
+@property(nonatomic, readonly, getter=isUploadPaused) BOOL uploadPaused;
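+
+// A minimal pause/resume sketch for a chunked, resumable upload in flight:
+//
+//   if (!ticket.isUploadPaused) {
+//     [ticket pauseUpload];
+//   }
+//   // Later, when ready to continue:
+//   [ticket resumeUpload];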
+
+/**
+ *  The request being fetched for the query.
+ */
+@property(nonatomic, readonly, nullable) NSURLRequest *fetchRequest;
+
+/**
+ *  The fetcher being used for the query request.
+ */
+@property(atomic, readonly, nullable) GTMSessionFetcher *objectFetcher;
+
+/**
+ *  The queue used for query callbacks.
+ */
+@property(atomic, readonly) dispatch_queue_t callbackQueue;
+
+/**
+ *  The API key used for the query request.
+ */
+@property(atomic, readonly, nullable) NSString *APIKey;
+
+/**
+ *  The Bundle Identifier to use for the API key restriction.
+ */
+@property(atomic, readonly, nullable) NSString *APIKeyRestrictionBundleID;
+
+#pragma mark Status
+
+/**
+ *  The server's response status for the query's fetch, if available.
+ */
+@property(nonatomic, readonly) NSInteger statusCode;
+
+/**
+ *  The error resulting from the query's fetch, if available.
+ */
+@property(nonatomic, readonly, nullable) NSError *fetchError;
+
+/**
+ *  A flag indicating if the query's callbacks have been invoked.
+ */
+@property(nonatomic, readonly) BOOL hasCalledCallback;
+
+/**
+ *  A flag indicating if the query execution was cancelled by the client app.
+ */
+@property(atomic, readonly, getter=isCancelled) BOOL cancelled;
+
+#pragma mark Pagination
+
+/**
+ *  A flag indicating if automatic pagination is enabled for the query.
+ */
+@property(nonatomic, readonly) BOOL shouldFetchNextPages;
+
+/**
+ *  The number of pages fetched, if automatic pagination is enabled for the query and multiple
+ *  pages have been fetched.
+ */
+@property(nonatomic, readonly) NSUInteger pagesFetchedCounter;
+
+#pragma mark User Properties
+
+/**
+ *  Ticket properties are a way to pass values via the ticket for the convenience of the client app.
+ *
+ *  Ticket properties are initialized from serviceProperties and augmented by the ticketProperties
+ *  of the query's execution parameters.
+ */
+@property(nonatomic, readonly, nullable) NSDictionary<NSString *, id> *ticketProperties;
+
+#pragma mark Payload
+
+/**
+ *  The object being uploaded via POST, PUT, or PATCH.
+ */
+@property(nonatomic, readonly, nullable) GTLRObject *postedObject;
+
+/**
+ *  The object downloaded for the query, after parsing.
+ */
+@property(nonatomic, readonly, nullable) GTLRObject *fetchedObject;
+
+/**
+ *  The query currently being fetched by this ticket. This may not be the original query when
+ *  fetching a second or later page.
+ */
+@property(atomic, readonly, nullable) id<GTLRQueryProtocol> executingQuery;
+
+/**
+ *  The query used to create this ticket.
+ */
+@property(atomic, readonly, nullable) id<GTLRQueryProtocol> originalQuery;
+
+/**
+ *  The @c GTLRObjectClassResolver for controlling object class selection.
+ */
+@property(atomic, readonly, strong) id<GTLRObjectClassResolver> objectClassResolver;
+
+/**
+ *  The query from within the ticket's batch request with the given ID.
+ *
+ *  @param requestID The desired ticket's request ID.
+ *
+ *  @return The query with the specified ID, if found.
+ */
+- (nullable GTLRQuery *)queryForRequestID:(NSString *)requestID;
+
+@end
+
+/**
+ *  The library doesn't use GTLRObjectCollectionImpl, but it provides a concrete implementation
+ *  so the methods do not cause private method errors in Xcode/AppStore review.
+ */
+@interface GTLRObjectCollectionImpl : GTLRObject
+@property(nonatomic, copy) NSString *nextPageToken;
+@end
+
+NS_ASSUME_NONNULL_END

+ 48 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRURITemplate.h

@@ -0,0 +1,48 @@
+/* Copyright (c) 2010 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#ifndef SKIP_GTLR_DEFINES
+  #import "GTLRDefines.h"
+#endif
+
+NS_ASSUME_NONNULL_BEGIN
+
+//
+// URI Template
+//
+// http://tools.ietf.org/html/draft-gregorio-uritemplate-04
+//
+// NOTE: This implementation is only a subset of the spec.  It should be able
+// to parse any template that matches the spec, but if the template makes use
+// of a feature that is not supported, it will fail with an error.
+//
+
+@interface GTLRURITemplate : NSObject
+
+// Process the template.  If the template uses an unsupported feature, it will
+// throw an exception to help catch that limitation.  The currently unsupported
+// feature is partial result modifiers (prefix/suffix).
+//
+// valueProvider should be anything that implements -objectForKey:.  At the
+// simplest level, this can be an NSDictionary.  However, a custom class that
+// implements valueForKey may be better for some uses.
++ (NSString *)expandTemplate:(NSString *)URITemplate
+                      values:(NSDictionary *)valueProvider;
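+
+// A minimal expansion sketch using an NSDictionary as the value provider:
+//
+//   NSString *expanded =
+//       [GTLRURITemplate expandTemplate:@"files/{fileId}"
+//                                values:@{ @"fileId" : @"abc123" }];
+//   // expanded == @"files/abc123"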
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 124 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRUploadParameters.h

@@ -0,0 +1,124 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Uploading documentation:
+// https://github.com/google/google-api-objectivec-client-for-rest/wiki#uploading-files
+
+#import <Foundation/Foundation.h>
+
+#import "GTLRDefines.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  Upload parameters are required for chunked-resumable or simple/multipart uploads.
+ *
+ *  The MIME type and one source for data (@c NSData, file URL, or @c NSFileHandle) must
+ *  be specified.
+ */
+@interface GTLRUploadParameters : NSObject <NSCopying>
+
+/**
+ *  The type of media being uploaded.
+ */
+@property(atomic, copy, nullable) NSString *MIMEType;
+
+/**
+ *  The media to be uploaded, represented as @c NSData.
+ */
+@property(atomic, retain, nullable) NSData *data;
+
+/**
+ *  The URL for the local file to be uploaded.
+ */
+@property(atomic, retain, nullable) NSURL *fileURL;
+
+/**
+ *  The media to be uploaded, represented as @c NSFileHandle.
+ *
+ *  @note This property is provided for compatibility with older code.
+ *        Uploading using @c fileURL is preferred over @c fileHandle.
+ */
+@property(atomic, retain, nullable) NSFileHandle *fileHandle;
+
+/**
+ *  Resuming an in-progress resumable, chunked upload is done with the upload location URL,
+ *  and requires a file URL or file handle for uploading.
+ */
+@property(atomic, retain, nullable) NSURL *uploadLocationURL;
+
+/**
+ *  Small uploads (for example, under 200K) can be done with a single multipart upload
+ *  request. The upload body must be provided as NSData, not a file URL or file handle.
+ *
+ *  Default value is NO.
+ */
+@property(atomic, assign) BOOL shouldUploadWithSingleRequest;
+
+/**
+ *  Uploads may be done without a JSON body as metadata in the initial request.
+ *
+ *  Default value is NO.
+ */
+@property(atomic, assign) BOOL shouldSendUploadOnly;
+
+/**
+ *  Uploads may use a background session when uploading via GTMSessionUploadFetcher.
+ *  Since background session fetches are slower than foreground fetches, this defaults
+ *  to NO.
+ *
+ *  It's reasonable for an application to set this to YES for a rare upload of a large file.
+ *
+ *  Default value is NO.
+ *
+ *  For more information about the hazards of background sessions, see the header comments for
+ *  the GTMSessionFetcher useBackgroundSession property.
+ */
+@property(atomic, assign) BOOL useBackgroundSession;
+
+/**
+ *  Constructor for uploading from @c NSData.
+ *
+ *  @param data     The data to be uploaded.
+ *  @param mimeType The media's type.
+ *
+ *  @return The upload parameters object.
+ */
++ (instancetype)uploadParametersWithData:(NSData *)data
+                                MIMEType:(NSString *)mimeType;
+
+/**
+ *  Constructor for uploading from a file URL.
+ *
+ *  @param fileURL  The file to upload.
+ *  @param mimeType The media's type.
+ *
+ *  @return The upload parameters object.
+ */
++ (instancetype)uploadParametersWithFileURL:(NSURL *)fileURL
+                                   MIMEType:(NSString *)mimeType;
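+
+// A minimal upload sketch (the file path and MIME type are placeholders):
+//
+//   NSURL *fileURL = [NSURL fileURLWithPath:@"/path/to/photo.jpg"];
+//   GTLRUploadParameters *params =
+//       [GTLRUploadParameters uploadParametersWithFileURL:fileURL
+//                                                MIMEType:@"image/jpeg"];
+//   query.uploadParameters = params;  // attach to an upload-capable query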
+
+/**
+ *  Constructor for uploading from a file handle.
+ *
+ *  @note This method is provided for compatibility with older code.  To upload files,
+ *        use a file URL.
+ */
++ (instancetype)uploadParametersWithFileHandle:(NSFileHandle *)fileHandle
+                                      MIMEType:(NSString *)mimeType;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 52 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRUtilities.h

@@ -0,0 +1,52 @@
+/* Copyright (c) 2011 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#ifndef SKIP_GTLR_DEFINES
+  #import "GTLRDefines.h"
+#endif
+
+NS_ASSUME_NONNULL_BEGIN
+
+// Helper functions for implementing isEqual:
+BOOL GTLR_AreEqualOrBothNil(id _Nullable obj1, id _Nullable obj2);
+BOOL GTLR_AreBoolsEqual(BOOL b1, BOOL b2);
+
+// Helper to ensure a number is a number.
+//
+// The Google API servers will send numbers >53 bits as strings to avoid
+// bugs in some JavaScript implementations.  Work around this by catching
+// the string and turning it back into a number.
+NSNumber *GTLR_EnsureNSNumber(NSNumber *num);
+
+@interface GTLRUtilities : NSObject
+
+// Key-value coding searches in an array
+//
+// Utilities to get from an array objects having a known value (or nil)
+// at a keyPath
+
++ (NSArray *)objectsFromArray:(NSArray *)sourceArray
+                    withValue:(id)desiredValue
+                   forKeyPath:(NSString *)keyPath;
+
++ (nullable id)firstObjectFromArray:(NSArray *)sourceArray
+                          withValue:(id)desiredValue
+                         forKeyPath:(NSString *)keyPath;
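+
+// A minimal filtering sketch (the "kind" value is a placeholder):
+//
+//   NSArray *matches = [GTLRUtilities objectsFromArray:items
+//                                            withValue:@"drive#file"
+//                                           forKeyPath:@"kind"];
+//   id first = [GTLRUtilities firstObjectFromArray:items
+//                                        withValue:@"drive#file"
+//                                       forKeyPath:@"kind"];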
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 15 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVision.h

@@ -0,0 +1,15 @@
+// NOTE: This file was generated by the ServiceGenerator.
+
+// ----------------------------------------------------------------------------
+// API:
+//   Cloud Vision API (vision/v1)
+// Description:
+//   Integrates Google Vision features, including image labeling, face, logo,
+//   and landmark detection, optical character recognition (OCR), and detection
+//   of explicit content, into applications.
+// Documentation:
+//   https://cloud.google.com/vision/
+
+#import "GTLRVisionObjects.h"
+#import "GTLRVisionQuery.h"
+#import "GTLRVisionService.h"

+ 11155 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionObjects.h

@@ -0,0 +1,11155 @@
+// NOTE: This file was generated by the ServiceGenerator.
+
+// ----------------------------------------------------------------------------
+// API:
+//   Cloud Vision API (vision/v1)
+// Description:
+//   Integrates Google Vision features, including image labeling, face, logo,
+//   and landmark detection, optical character recognition (OCR), and detection
+//   of explicit content, into applications.
+// Documentation:
+//   https://cloud.google.com/vision/
+
+#if GTLR_BUILT_AS_FRAMEWORK
+  #import "GTLR/GTLRObject.h"
+#else
+  #import "GTLRObject.h"
+#endif
+
+#if GTLR_RUNTIME_VERSION != 3000
+#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
+#endif
+
+@class GTLRVision_AnnotateImageRequest;
+@class GTLRVision_AnnotateImageResponse;
+@class GTLRVision_AsyncAnnotateFileRequest;
+@class GTLRVision_AsyncAnnotateFileResponse;
+@class GTLRVision_Block;
+@class GTLRVision_BoundingPoly;
+@class GTLRVision_Color;
+@class GTLRVision_ColorInfo;
+@class GTLRVision_CropHint;
+@class GTLRVision_CropHintsAnnotation;
+@class GTLRVision_CropHintsParams;
+@class GTLRVision_DetectedBreak;
+@class GTLRVision_DetectedLanguage;
+@class GTLRVision_DominantColorsAnnotation;
+@class GTLRVision_EntityAnnotation;
+@class GTLRVision_FaceAnnotation;
+@class GTLRVision_Feature;
+@class GTLRVision_GcsDestination;
+@class GTLRVision_GcsSource;
+@class GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse;
+@class GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Block;
+@class GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo;
+@class GTLRVision_GoogleCloudVisionV1p1beta1CropHint;
+@class GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark;
+@class GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination;
+@class GTLRVision_GoogleCloudVisionV1p1beta1GcsSource;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties;
+@class GTLRVision_GoogleCloudVisionV1p1beta1InputConfig;
+@class GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo;
+@class GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex;
+@class GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Page;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Paragraph;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Position;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Product;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult;
+@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Property;
+@class GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Symbol;
+@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak;
+@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage;
+@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Vertex;
+@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetection;
+@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity;
+@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage;
+@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel;
+@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage;
+@class GTLRVision_GoogleCloudVisionV1p1beta1Word;
+@class GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse;
+@class GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Block;
+@class GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo;
+@class GTLRVision_GoogleCloudVisionV1p2beta1CropHint;
+@class GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark;
+@class GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination;
+@class GTLRVision_GoogleCloudVisionV1p2beta1GcsSource;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties;
+@class GTLRVision_GoogleCloudVisionV1p2beta1InputConfig;
+@class GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo;
+@class GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex;
+@class GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Page;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Paragraph;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Position;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Product;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult;
+@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Property;
+@class GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Symbol;
+@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak;
+@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage;
+@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Vertex;
+@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetection;
+@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity;
+@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage;
+@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel;
+@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage;
+@class GTLRVision_GoogleCloudVisionV1p2beta1Word;
+@class GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse;
+@class GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Block;
+@class GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo;
+@class GTLRVision_GoogleCloudVisionV1p3beta1CropHint;
+@class GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark;
+@class GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination;
+@class GTLRVision_GoogleCloudVisionV1p3beta1GcsSource;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties;
+@class GTLRVision_GoogleCloudVisionV1p3beta1InputConfig;
+@class GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo;
+@class GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex;
+@class GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Page;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Paragraph;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Position;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Product;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Property;
+@class GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage;
+@class GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Symbol;
+@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation;
+@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak;
+@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage;
+@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Vertex;
+@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetection;
+@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity;
+@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage;
+@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel;
+@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage;
+@class GTLRVision_GoogleCloudVisionV1p3beta1Word;
+@class GTLRVision_GroupedResult;
+@class GTLRVision_Image;
+@class GTLRVision_ImageAnnotationContext;
+@class GTLRVision_ImageContext;
+@class GTLRVision_ImageProperties;
+@class GTLRVision_ImageSource;
+@class GTLRVision_ImportProductSetsGcsSource;
+@class GTLRVision_ImportProductSetsInputConfig;
+@class GTLRVision_InputConfig;
+@class GTLRVision_KeyValue;
+@class GTLRVision_Landmark;
+@class GTLRVision_LatLng;
+@class GTLRVision_LatLongRect;
+@class GTLRVision_LocalizedObjectAnnotation;
+@class GTLRVision_LocationInfo;
+@class GTLRVision_NormalizedVertex;
+@class GTLRVision_Operation;
+@class GTLRVision_Operation_Metadata;
+@class GTLRVision_Operation_Response;
+@class GTLRVision_OutputConfig;
+@class GTLRVision_Page;
+@class GTLRVision_Paragraph;
+@class GTLRVision_Position;
+@class GTLRVision_Product;
+@class GTLRVision_ProductSearchParams;
+@class GTLRVision_ProductSearchResults;
+@class GTLRVision_ProductSet;
+@class GTLRVision_Property;
+@class GTLRVision_ReferenceImage;
+@class GTLRVision_Result;
+@class GTLRVision_SafeSearchAnnotation;
+@class GTLRVision_Status;
+@class GTLRVision_Status_Details_Item;
+@class GTLRVision_Symbol;
+@class GTLRVision_TextAnnotation;
+@class GTLRVision_TextProperty;
+@class GTLRVision_Vertex;
+@class GTLRVision_WebDetection;
+@class GTLRVision_WebDetectionParams;
+@class GTLRVision_WebEntity;
+@class GTLRVision_WebImage;
+@class GTLRVision_WebLabel;
+@class GTLRVision_WebPage;
+@class GTLRVision_Word;
+
+// Generated comments include content from the discovery document; keep them
+// from triggering warnings, since clang's checks are somewhat arbitrary.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdocumentation"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// ----------------------------------------------------------------------------
+// Constants - For some of the classes' properties below.
+
+// ----------------------------------------------------------------------------
+// GTLRVision_BatchOperationMetadata.state
+
+/**
+ *  The request is done after the longrunning.Operations.CancelOperation has
+ *  been called by the user. Any records that were processed before the
+ *  cancel command are output as specified in the request.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Cancelled;
+/**
+ *  The request is done and no item has been successfully processed.
+ *
+ *  Value: "FAILED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Failed;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "PROCESSING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Processing;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_StateUnspecified;
+/**
+ *  The request is done and at least one item has been successfully
+ *  processed.
+ *
+ *  Value: "SUCCESSFUL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Successful;
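
A hedged sketch of how these state strings are meant to be consumed: the property is a plain NSString, so callers compare against the constants directly (GTLRVision_BatchOperationMetadata itself is defined later in this header).

static void LogBatchState(GTLRVision_BatchOperationMetadata *metadata) {
  NSString *state = metadata.state;
  if ([state isEqual:kGTLRVision_BatchOperationMetadata_State_Processing]) {
    NSLog(@"batch still processing");
  } else if ([state isEqual:kGTLRVision_BatchOperationMetadata_State_Successful]) {
    NSLog(@"batch done; at least one item succeeded");
  } else if ([state isEqual:kGTLRVision_BatchOperationMetadata_State_Failed] ||
             [state isEqual:kGTLRVision_BatchOperationMetadata_State_Cancelled]) {
    NSLog(@"batch ended without success: %@", state);
  }
}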
+
+// ----------------------------------------------------------------------------
+// GTLRVision_Block.blockType
+
+/**
+ *  Barcode block.
+ *
+ *  Value: "BARCODE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Barcode;
+/**
+ *  Image block.
+ *
+ *  Value: "PICTURE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Picture;
+/**
+ *  Horizontal/vertical line box.
+ *
+ *  Value: "RULER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Ruler;
+/**
+ *  Table block.
+ *
+ *  Value: "TABLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Table;
+/**
+ *  Regular text block.
+ *
+ *  Value: "TEXT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Text;
+/**
+ *  Unknown block type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Unknown;
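
A hedged sketch of filtering by block type when walking a full-text annotation; the page.blocks array is assumed per the vision/v1 Page schema.

static NSArray<GTLRVision_Block *> *TableBlocks(GTLRVision_Page *page) {
  NSMutableArray<GTLRVision_Block *> *tables = [NSMutableArray array];
  for (GTLRVision_Block *block in page.blocks) {
    if ([block.blockType isEqual:kGTLRVision_Block_BlockType_Table]) {
      [tables addObject:block];  // keep only TABLE blocks
    }
  }
  return tables;
}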
+
+// ----------------------------------------------------------------------------
+// GTLRVision_DetectedBreak.type
+
+/**
+ *  Line-wrapping break.
+ *
+ *  Value: "EOL_SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_EolSureSpace;
+/**
+ *  End-line hyphen that is not present in text; does not co-occur with
+ *  `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
+ *
+ *  Value: "HYPHEN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Hyphen;
+/**
+ *  Line break that ends a paragraph.
+ *
+ *  Value: "LINE_BREAK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_LineBreak;
+/**
+ *  Regular space.
+ *
+ *  Value: "SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Space;
+/**
+ *  Sure space (very wide).
+ *
+ *  Value: "SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_SureSpace;
+/**
+ *  Unknown break label type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Unknown;
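
These break types matter when flattening OCR output back into a string. A hedged sketch, assuming the vision/v1 Symbol and TextProperty shape (symbol.property.detectedBreak):

static NSString *FlattenWord(GTLRVision_Word *word) {
  NSMutableString *text = [NSMutableString string];
  for (GTLRVision_Symbol *symbol in word.symbols) {
    [text appendString:(symbol.text ?: @"")];
    NSString *breakType = symbol.property.detectedBreak.type;
    if ([breakType isEqual:kGTLRVision_DetectedBreak_Type_Space] ||
        [breakType isEqual:kGTLRVision_DetectedBreak_Type_SureSpace]) {
      [text appendString:@" "];
    } else if ([breakType isEqual:kGTLRVision_DetectedBreak_Type_EolSureSpace] ||
               [breakType isEqual:kGTLRVision_DetectedBreak_Type_LineBreak]) {
      [text appendString:@"\n"];
    }
    // HYPHEN is deliberately dropped: per the comment above, the hyphen glyph
    // is not present in the recognized text.
  }
  return text;
}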
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.angerLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.blurredLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.headwearLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.joyLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.sorrowLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.surpriseLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_FaceAnnotation.underExposedLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
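
All of the *Likelihood properties above share the same five underlying string values, so one ordinal table can rank any of them. A sketch for thresholding:

static NSInteger LikelihoodRank(NSString *likelihood) {
  // UNKNOWN, nil, or anything unexpected ranks below VERY_UNLIKELY.
  NSArray<NSString *> *ascending =
      @[ @"VERY_UNLIKELY", @"UNLIKELY", @"POSSIBLE", @"LIKELY", @"VERY_LIKELY" ];
  NSUInteger idx = likelihood ? [ascending indexOfObject:likelihood] : NSNotFound;
  return idx == NSNotFound ? -1 : (NSInteger)idx;
}

// Example: treat a face as smiling when joy is at least LIKELY.
static BOOL IsSmiling(GTLRVision_FaceAnnotation *face) {
  return LikelihoodRank(face.joyLikelihood) >=
         LikelihoodRank(kGTLRVision_FaceAnnotation_JoyLikelihood_Likely);
}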
+
+// ----------------------------------------------------------------------------
+// GTLRVision_Feature.type
+
+/**
+ *  Run crop hints.
+ *
+ *  Value: "CROP_HINTS"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_CropHints;
+/**
+ *  Run dense text document OCR. Takes precedence when both
+ *  `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
+ *
+ *  Value: "DOCUMENT_TEXT_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_DocumentTextDetection;
+/**
+ *  Run face detection.
+ *
+ *  Value: "FACE_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_FaceDetection;
+/**
+ *  Compute a set of image properties, such as the
+ *  image's dominant colors.
+ *
+ *  Value: "IMAGE_PROPERTIES"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ImageProperties;
+/**
+ *  Run label detection.
+ *
+ *  Value: "LABEL_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LabelDetection;
+/**
+ *  Run landmark detection.
+ *
+ *  Value: "LANDMARK_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LandmarkDetection;
+/**
+ *  Run logo detection.
+ *
+ *  Value: "LOGO_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LogoDetection;
+/**
+ *  Run localizer for object detection.
+ *
+ *  Value: "OBJECT_LOCALIZATION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ObjectLocalization;
+/**
+ *  Run Product Search.
+ *
+ *  Value: "PRODUCT_SEARCH"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ProductSearch;
+/**
+ *  Run Safe Search to detect potentially unsafe
+ *  or undesirable content.
+ *
+ *  Value: "SAFE_SEARCH_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_SafeSearchDetection;
+/**
+ *  Run text detection / optical character recognition (OCR). Text detection
+ *  is optimized for areas of text within a larger image; if the image is
+ *  a document, use `DOCUMENT_TEXT_DETECTION` instead.
+ *
+ *  Value: "TEXT_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_TextDetection;
+/**
+ *  Unspecified feature type.
+ *
+ *  Value: "TYPE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_TypeUnspecified;
+/**
+ *  Run web detection.
+ *
+ *  Value: "WEB_DETECTION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_WebDetection;
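
A single request may carry several of these feature types at once, with maxResults capping the annotations returned per feature. A hedged sketch (the ten-label cap is an illustrative choice):

static NSArray<GTLRVision_Feature *> *DefaultFeatures(void) {
  GTLRVision_Feature *labels = [[GTLRVision_Feature alloc] init];
  labels.type = kGTLRVision_Feature_Type_LabelDetection;
  labels.maxResults = @10;  // at most ten labels back

  GTLRVision_Feature *ocr = [[GTLRVision_Feature alloc] init];
  // Takes precedence if TEXT_DETECTION were also listed (see above).
  ocr.type = kGTLRVision_Feature_Type_DocumentTextDetection;

  return @[ labels, ocr ];
}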
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1Block.blockType
+
+/**
+ *  Barcode block.
+ *
+ *  Value: "BARCODE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Barcode;
+/**
+ *  Image block.
+ *
+ *  Value: "PICTURE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Picture;
+/**
+ *  Horizontal/vertical line box.
+ *
+ *  Value: "RULER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Ruler;
+/**
+ *  Table block.
+ *
+ *  Value: "TABLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Table;
+/**
+ *  Regular text block.
+ *
+ *  Value: "TEXT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Text;
+/**
+ *  Unknown block type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Unknown;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.angerLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.blurredLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.headwearLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.joyLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.sorrowLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.surpriseLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.underExposedLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark.type
+
+/**
+ *  Chin gnathion.
+ *
+ *  Value: "CHIN_GNATHION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinGnathion;
+/**
+ *  Chin left gonion.
+ *
+ *  Value: "CHIN_LEFT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
+/**
+ *  Chin right gonion.
+ *
+ *  Value: "CHIN_RIGHT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinRightGonion;
+/**
+ *  Forehead glabella.
+ *
+ *  Value: "FOREHEAD_GLABELLA"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
+/**
+ *  Left ear tragion.
+ *
+ *  Value: "LEFT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEarTragion;
+/**
+ *  Left eye.
+ *
+ *  Value: "LEFT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEye;
+/**
+ *  Left eye, bottom boundary.
+ *
+ *  Value: "LEFT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
+/**
+ *  Left eyebrow, upper midpoint.
+ *
+ *  Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
+/**
+ *  Left eye, left corner.
+ *
+ *  Value: "LEFT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
+/**
+ *  Left eye pupil.
+ *
+ *  Value: "LEFT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyePupil;
+/**
+ *  Left eye, right corner.
+ *
+ *  Value: "LEFT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
+/**
+ *  Left eye, top boundary.
+ *
+ *  Value: "LEFT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
+/**
+ *  Left of left eyebrow.
+ *
+ *  Value: "LEFT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
+/**
+ *  Left of right eyebrow.
+ *
+ *  Value: "LEFT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
+/**
+ *  Lower lip.
+ *
+ *  Value: "LOWER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LowerLip;
+/**
+ *  Midpoint between eyes.
+ *
+ *  Value: "MIDPOINT_BETWEEN_EYES"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
+/**
+ *  Mouth center.
+ *
+ *  Value: "MOUTH_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthCenter;
+/**
+ *  Mouth left.
+ *
+ *  Value: "MOUTH_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthLeft;
+/**
+ *  Mouth right.
+ *
+ *  Value: "MOUTH_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthRight;
+/**
+ *  Nose, bottom center.
+ *
+ *  Value: "NOSE_BOTTOM_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
+/**
+ *  Nose, bottom left.
+ *
+ *  Value: "NOSE_BOTTOM_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
+/**
+ *  Nose, bottom right.
+ *
+ *  Value: "NOSE_BOTTOM_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomRight;
+/**
+ *  Nose tip.
+ *
+ *  Value: "NOSE_TIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip;
+/**
+ *  Right ear tragion.
+ *
+ *  Value: "RIGHT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEarTragion;
+/**
+ *  Right eye.
+ *
+ *  Value: "RIGHT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEye;
+/**
+ *  Right eye, bottom boundary.
+ *
+ *  Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
+/**
+ *  Right eyebrow, upper midpoint.
+ *
+ *  Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
+/**
+ *  Right eye, left corner.
+ *
+ *  Value: "RIGHT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
+/**
+ *  Right eye pupil.
+ *
+ *  Value: "RIGHT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyePupil;
+/**
+ *  Right eye, right corner.
+ *
+ *  Value: "RIGHT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
+/**
+ *  Right eye, top boundary.
+ *
+ *  Value: "RIGHT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
+/**
+ *  Right of left eyebrow.
+ *
+ *  Value: "RIGHT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
+/**
+ *  Right of right eyebrow.
+ *
+ *  Value: "RIGHT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
+/**
+ *  Unknown face landmark detected. Should not be filled.
+ *
+ *  Value: "UNKNOWN_LANDMARK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UnknownLandmark;
+/**
+ *  Upper lip.
+ *
+ *  Value: "UPPER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UpperLip;
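
Landmarks come back as an unordered array on the face annotation; a hedged lookup sketch, assuming the face.landmarks array per the v1p1beta1 FaceAnnotation schema:

static GTLRVision_GoogleCloudVisionV1p1beta1Position *NoseTipPosition(
    GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation *face) {
  for (GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark *landmark
       in face.landmarks) {
    if ([landmark.type isEqual:
            kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip]) {
      return landmark.position;
    }
  }
  return nil;  // landmark not detected
}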
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata.state
+
+/**
+ *  The batch processing was cancelled.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Cancelled;
+/**
+ *  Request is received.
+ *
+ *  Value: "CREATED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Created;
+/**
+ *  The batch processing is done.
+ *
+ *  Value: "DONE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Done;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "RUNNING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Running;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_StateUnspecified;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.adult
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.medical
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.racy
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.spoof
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.violence
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryUnlikely;
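
A hedged sketch of gating content on these verdicts. The property names (adult, racy, violence) come from the section headers above; the POSSIBLE threshold is an illustrative policy choice, not anything the API prescribes.

static BOOL PassesSafeSearch(
    GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation *annotation) {
  NSArray<NSString *> *flagged = @[ @"POSSIBLE", @"LIKELY", @"VERY_LIKELY" ];
  NSArray<NSString *> *verdicts = @[ annotation.adult ?: @"",
                                     annotation.racy ?: @"",
                                     annotation.violence ?: @"" ];
  for (NSString *verdict in verdicts) {
    if ([flagged containsObject:verdict]) {
      return NO;  // at or above POSSIBLE in some category
    }
  }
  return YES;
}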
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak.type
+
+/**
+ *  Line-wrapping break.
+ *
+ *  Value: "EOL_SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
+/**
+ *  End-line hyphen that is not present in text; does not co-occur with
+ *  `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
+ *
+ *  Value: "HYPHEN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Hyphen;
+/**
+ *  Line break that ends a paragraph.
+ *
+ *  Value: "LINE_BREAK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_LineBreak;
+/**
+ *  Regular space.
+ *
+ *  Value: "SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Space;
+/**
+ *  Sure space (very wide).
+ *
+ *  Value: "SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_SureSpace;
+/**
+ *  Unknown break label type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Unknown;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1Block.blockType
+
+/**
+ *  Barcode block.
+ *
+ *  Value: "BARCODE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Barcode;
+/**
+ *  Image block.
+ *
+ *  Value: "PICTURE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Picture;
+/**
+ *  Horizontal/vertical line box.
+ *
+ *  Value: "RULER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Ruler;
+/**
+ *  Table block.
+ *
+ *  Value: "TABLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Table;
+/**
+ *  Regular text block.
+ *
+ *  Value: "TEXT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Text;
+/**
+ *  Unknown block type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Unknown;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.angerLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.blurredLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.headwearLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.joyLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.sorrowLikelihood
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows sorrow.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows sorrow.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows sorrow.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.surpriseLikelihood
+
+/**
+ *  It is likely that the face shows surprise.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Likely;
+/**
+ *  It is possible that the face shows surprise.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows surprise.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows surprise.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows surprise.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.underExposedLikelihood
+
+/**
+ *  It is likely that the face is under-exposed.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Likely;
+/**
+ *  It is possible that the face is under-exposed.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
+/**
+ *  It is unlikely that the face is under-exposed.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
+/**
+ *  It is very likely that the face is under-exposed.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face is under-exposed.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark.type
+
+/**
+ *  Chin gnathion.
+ *
+ *  Value: "CHIN_GNATHION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinGnathion;
+/**
+ *  Chin left gonion.
+ *
+ *  Value: "CHIN_LEFT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
+/**
+ *  Chin right gonion.
+ *
+ *  Value: "CHIN_RIGHT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinRightGonion;
+/**
+ *  Forehead glabella.
+ *
+ *  Value: "FOREHEAD_GLABELLA"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
+/**
+ *  Left ear tragion.
+ *
+ *  Value: "LEFT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEarTragion;
+/**
+ *  Left eye.
+ *
+ *  Value: "LEFT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEye;
+/**
+ *  Left eye, bottom boundary.
+ *
+ *  Value: "LEFT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
+/**
+ *  Left eyebrow, upper midpoint.
+ *
+ *  Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
+/**
+ *  Left eye, left corner.
+ *
+ *  Value: "LEFT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
+/**
+ *  Left eye pupil.
+ *
+ *  Value: "LEFT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyePupil;
+/**
+ *  Left eye, right corner.
+ *
+ *  Value: "LEFT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
+/**
+ *  Left eye, top boundary.
+ *
+ *  Value: "LEFT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
+/**
+ *  Left of left eyebrow.
+ *
+ *  Value: "LEFT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
+/**
+ *  Left of right eyebrow.
+ *
+ *  Value: "LEFT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
+/**
+ *  Lower lip.
+ *
+ *  Value: "LOWER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LowerLip;
+/**
+ *  Midpoint between eyes.
+ *
+ *  Value: "MIDPOINT_BETWEEN_EYES"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
+/**
+ *  Mouth center.
+ *
+ *  Value: "MOUTH_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthCenter;
+/**
+ *  Mouth left.
+ *
+ *  Value: "MOUTH_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthLeft;
+/**
+ *  Mouth right.
+ *
+ *  Value: "MOUTH_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthRight;
+/**
+ *  Nose, bottom center.
+ *
+ *  Value: "NOSE_BOTTOM_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
+/**
+ *  Nose, bottom left.
+ *
+ *  Value: "NOSE_BOTTOM_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
+/**
+ *  Nose, bottom right.
+ *
+ *  Value: "NOSE_BOTTOM_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomRight;
+/**
+ *  Nose tip.
+ *
+ *  Value: "NOSE_TIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseTip;
+/**
+ *  Right ear tragion.
+ *
+ *  Value: "RIGHT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEarTragion;
+/**
+ *  Right eye.
+ *
+ *  Value: "RIGHT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEye;
+/**
+ *  Right eye, bottom boundary.
+ *
+ *  Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
+/**
+ *  Right eyebrow, upper midpoint.
+ *
+ *  Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
+/**
+ *  Right eye, left corner.
+ *
+ *  Value: "RIGHT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
+/**
+ *  Right eye pupil.
+ *
+ *  Value: "RIGHT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyePupil;
+/**
+ *  Right eye, right corner.
+ *
+ *  Value: "RIGHT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
+/**
+ *  Right eye, top boundary.
+ *
+ *  Value: "RIGHT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
+/**
+ *  Right of left eyebrow.
+ *
+ *  Value: "RIGHT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
+/**
+ *  Right of right eyebrow.
+ *
+ *  Value: "RIGHT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
+/**
+ *  Unknown face landmark detected. Should not be filled.
+ *
+ *  Value: "UNKNOWN_LANDMARK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UnknownLandmark;
+/**
+ *  Upper lip.
+ *
+ *  Value: "UPPER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UpperLip;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata.state
+
+/**
+ *  The batch processing was cancelled.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled;
+/**
+ *  Request is received.
+ *
+ *  Value: "CREATED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Created;
+/**
+ *  The batch processing is done.
+ *
+ *  Value: "DONE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "RUNNING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Running;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_StateUnspecified;
+
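+// Usage sketch (editorial): a long-running annotation operation is finished
+// once its state reaches DONE or CANCELLED; CREATED and RUNNING mean the
+// caller should keep polling. The helper below is illustrative, not GTLR API.
+NS_INLINE BOOL GTLRVisionExample_OperationIsTerminal(NSString *state) {
+  return [state isEqualToString:kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done] ||
+         [state isEqualToString:kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled];
+}
+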
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.adult
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.medical
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.racy
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.spoof
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.violence
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak.type
+
+/**
+ *  Line-wrapping break.
+ *
+ *  Value: "EOL_SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
+/**
+ *  End-line hyphen that is not present in text; does not co-occur with
+ *  `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
+ *
+ *  Value: "HYPHEN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen;
+/**
+ *  Line break that ends a paragraph.
+ *
+ *  Value: "LINE_BREAK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak;
+/**
+ *  Regular space.
+ *
+ *  Value: "SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space;
+/**
+ *  Sure space (very wide).
+ *
+ *  Value: "SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_SureSpace;
+/**
+ *  Unknown break label type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Unknown;
+
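+// Usage sketch (editorial): when flattening OCR output back into plain text,
+// the detected break type decides which separator follows a word. The mapping
+// below is one reasonable, hypothetical choice, not a GTLR API.
+NS_INLINE NSString *GTLRVisionExample_SeparatorForBreak(NSString *breakType) {
+  if ([breakType isEqualToString:kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen]) {
+    return @"";  // The hyphen is not in the text, so join the word halves.
+  }
+  if ([breakType isEqualToString:kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak] ||
+      [breakType isEqualToString:kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace]) {
+    return @"\n";  // Both end a visual line; LINE_BREAK also ends the paragraph.
+  }
+  return @" ";  // SPACE, SURE_SPACE, and UNKNOWN read naturally as a space.
+}
+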
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata.state
+
+/**
+ *  The request is done after the user has called
+ *  longrunning.Operations.CancelOperation. Any records that were processed
+ *  before the cancel command are output as specified in the request.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled;
+/**
+ *  The request is done and no item has been successfully processed.
+ *
+ *  Value: "FAILED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "PROCESSING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Processing;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_StateUnspecified;
+/**
+ *  The request is done and at least one item has been successfully
+ *  processed.
+ *
+ *  Value: "SUCCESSFUL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful;
+
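+// Usage sketch (editorial): for batch operations the terminal states are
+// SUCCESSFUL, FAILED, and CANCELLED, while PROCESSING means the batch is still
+// running. Hypothetical helper over the constants above:
+NS_INLINE BOOL GTLRVisionExample_BatchIsTerminal(NSString *state) {
+  return [state isEqualToString:kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful] ||
+         [state isEqualToString:kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed] ||
+         [state isEqualToString:kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled];
+}
+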
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1Block.blockType
+
+/**
+ *  Barcode block.
+ *
+ *  Value: "BARCODE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Barcode;
+/**
+ *  Image block.
+ *
+ *  Value: "PICTURE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Picture;
+/**
+ *  Horizontal/vertical line box.
+ *
+ *  Value: "RULER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Ruler;
+/**
+ *  Table block.
+ *
+ *  Value: "TABLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Table;
+/**
+ *  Regular text block.
+ *
+ *  Value: "TEXT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text;
+/**
+ *  Unknown block type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Unknown;
+
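+// Usage sketch (editorial): text-extraction code typically keeps TEXT and
+// TABLE blocks and skips pictures, rulers, and barcodes. Hypothetical
+// predicate, not part of GTLR:
+NS_INLINE BOOL GTLRVisionExample_BlockCarriesText(NSString *blockType) {
+  return [blockType isEqualToString:kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text] ||
+         [blockType isEqualToString:kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Table];
+}
+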
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.angerLikelihood
+
+/**
+ *  It is likely that the face shows anger.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Likely;
+/**
+ *  It is possible that the face shows anger.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows anger.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows anger.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows anger.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.blurredLikelihood
+
+/**
+ *  It is likely that the face is blurred.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Likely;
+/**
+ *  It is possible that the face is blurred.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unknown;
+/**
+ *  It is unlikely that the face is blurred.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unlikely;
+/**
+ *  It is very likely that the face is blurred.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face is blurred.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.headwearLikelihood
+
+/**
+ *  It is likely that the face has headwear.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Likely;
+/**
+ *  It is possible that the face has headwear.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unknown;
+/**
+ *  It is unlikely that the face has headwear.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
+/**
+ *  It is very likely that the face has headwear.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face has headwear.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.joyLikelihood
+
+/**
+ *  It is likely that the face shows joy.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Likely;
+/**
+ *  It is possible that the face shows joy.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows joy.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows joy.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows joy.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.sorrowLikelihood
+
+/**
+ *  It is likely that the face shows sorrow.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Likely;
+/**
+ *  It is possible that the face shows sorrow.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows sorrow.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows sorrow.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows sorrow.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.surpriseLikelihood
+
+/**
+ *  It is likely that the face shows surprise.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Likely;
+/**
+ *  It is possible that the face shows surprise.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unknown;
+/**
+ *  It is unlikely that the face shows surprise.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
+/**
+ *  It is very likely that the face shows surprise.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face shows surprise.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.underExposedLikelihood
+
+/**
+ *  It is likely that the face is under-exposed.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Likely;
+/**
+ *  It is possible that the face is under-exposed.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
+/**
+ *  It is unlikely that the face is under-exposed.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
+/**
+ *  It is very likely that the face is under-exposed.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
+/**
+ *  It is very unlikely that the face is under-exposed.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark.type
+
+/**
+ *  Chin gnathion.
+ *
+ *  Value: "CHIN_GNATHION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinGnathion;
+/**
+ *  Chin left gonion.
+ *
+ *  Value: "CHIN_LEFT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
+/**
+ *  Chin right gonion.
+ *
+ *  Value: "CHIN_RIGHT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinRightGonion;
+/**
+ *  Forehead glabella.
+ *
+ *  Value: "FOREHEAD_GLABELLA"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
+/**
+ *  Left ear tragion.
+ *
+ *  Value: "LEFT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEarTragion;
+/**
+ *  Left eye.
+ *
+ *  Value: "LEFT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEye;
+/**
+ *  Left eye, bottom boundary.
+ *
+ *  Value: "LEFT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
+/**
+ *  Left eyebrow, upper midpoint.
+ *
+ *  Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
+/**
+ *  Left eye, left corner.
+ *
+ *  Value: "LEFT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
+/**
+ *  Left eye pupil.
+ *
+ *  Value: "LEFT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyePupil;
+/**
+ *  Left eye, right corner.
+ *
+ *  Value: "LEFT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
+/**
+ *  Left eye, top boundary.
+ *
+ *  Value: "LEFT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
+/**
+ *  Left of left eyebrow.
+ *
+ *  Value: "LEFT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
+/**
+ *  Left of right eyebrow.
+ *
+ *  Value: "LEFT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
+/**
+ *  Lower lip.
+ *
+ *  Value: "LOWER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LowerLip;
+/**
+ *  Midpoint between eyes.
+ *
+ *  Value: "MIDPOINT_BETWEEN_EYES"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
+/**
+ *  Mouth center.
+ *
+ *  Value: "MOUTH_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthCenter;
+/**
+ *  Mouth left.
+ *
+ *  Value: "MOUTH_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthLeft;
+/**
+ *  Mouth right.
+ *
+ *  Value: "MOUTH_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthRight;
+/**
+ *  Nose, bottom center.
+ *
+ *  Value: "NOSE_BOTTOM_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
+/**
+ *  Nose, bottom left.
+ *
+ *  Value: "NOSE_BOTTOM_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
+/**
+ *  Nose, bottom right.
+ *
+ *  Value: "NOSE_BOTTOM_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomRight;
+/**
+ *  Nose tip.
+ *
+ *  Value: "NOSE_TIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseTip;
+/**
+ *  Right ear tragion.
+ *
+ *  Value: "RIGHT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEarTragion;
+/**
+ *  Right eye.
+ *
+ *  Value: "RIGHT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEye;
+/**
+ *  Right eye, bottom boundary.
+ *
+ *  Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
+/**
+ *  Right eyebrow, upper midpoint.
+ *
+ *  Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
+/**
+ *  Right eye, left corner.
+ *
+ *  Value: "RIGHT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
+/**
+ *  Right eye pupil.
+ *
+ *  Value: "RIGHT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyePupil;
+/**
+ *  Right eye, right corner.
+ *
+ *  Value: "RIGHT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
+/**
+ *  Right eye, top boundary.
+ *
+ *  Value: "RIGHT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
+/**
+ *  Right of left eyebrow.
+ *
+ *  Value: "RIGHT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
+/**
+ *  Right of right eyebrow.
+ *
+ *  Value: "RIGHT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
+/**
+ *  Unknown face landmark detected. Should not be filled.
+ *
+ *  Value: "UNKNOWN_LANDMARK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UnknownLandmark;
+/**
+ *  Upper lip.
+ *
+ *  Value: "UPPER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UpperLip;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata.state
+
+/**
+ *  The batch processing was cancelled.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Cancelled;
+/**
+ *  Request is received.
+ *
+ *  Value: "CREATED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Created;
+/**
+ *  The batch processing is done.
+ *
+ *  Value: "DONE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Done;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "RUNNING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Running;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_StateUnspecified;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.adult
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.medical
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.racy
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.spoof
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.violence
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak.type
+
+/**
+ *  Line-wrapping break.
+ *
+ *  Value: "EOL_SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
+/**
+ *  End-line hyphen that is not present in text; does not co-occur with
+ *  `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
+ *
+ *  Value: "HYPHEN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Hyphen;
+/**
+ *  Line break that ends a paragraph.
+ *
+ *  Value: "LINE_BREAK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_LineBreak;
+/**
+ *  Regular space.
+ *
+ *  Value: "SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Space;
+/**
+ *  Sure space (very wide).
+ *
+ *  Value: "SURE_SPACE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_SureSpace;
+/**
+ *  Unknown break label type.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Unknown;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_Landmark.type
+
+/**
+ *  Chin gnathion.
+ *
+ *  Value: "CHIN_GNATHION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinGnathion;
+/**
+ *  Chin left gonion.
+ *
+ *  Value: "CHIN_LEFT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinLeftGonion;
+/**
+ *  Chin right gonion.
+ *
+ *  Value: "CHIN_RIGHT_GONION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinRightGonion;
+/**
+ *  Forehead glabella.
+ *
+ *  Value: "FOREHEAD_GLABELLA"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ForeheadGlabella;
+/**
+ *  Left ear tragion.
+ *
+ *  Value: "LEFT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEarTragion;
+/**
+ *  Left eye.
+ *
+ *  Value: "LEFT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEye;
+/**
+ *  Left eye, bottom boundary.
+ *
+ *  Value: "LEFT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeBottomBoundary;
+/**
+ *  Left eyebrow, upper midpoint.
+ *
+ *  Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyebrowUpperMidpoint;
+/**
+ *  Left eye, left corner.
+ *
+ *  Value: "LEFT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeLeftCorner;
+/**
+ *  Left eye pupil.
+ *
+ *  Value: "LEFT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyePupil;
+/**
+ *  Left eye, right corner.
+ *
+ *  Value: "LEFT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeRightCorner;
+/**
+ *  Left eye, top boundary.
+ *
+ *  Value: "LEFT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeTopBoundary;
+/**
+ *  Left of left eyebrow.
+ *
+ *  Value: "LEFT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftOfLeftEyebrow;
+/**
+ *  Left of right eyebrow.
+ *
+ *  Value: "LEFT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftOfRightEyebrow;
+/**
+ *  Lower lip.
+ *
+ *  Value: "LOWER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LowerLip;
+/**
+ *  Midpoint between eyes.
+ *
+ *  Value: "MIDPOINT_BETWEEN_EYES"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MidpointBetweenEyes;
+/**
+ *  Mouth center.
+ *
+ *  Value: "MOUTH_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthCenter;
+/**
+ *  Mouth left.
+ *
+ *  Value: "MOUTH_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthLeft;
+/**
+ *  Mouth right.
+ *
+ *  Value: "MOUTH_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthRight;
+/**
+ *  Nose, bottom center.
+ *
+ *  Value: "NOSE_BOTTOM_CENTER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomCenter;
+/**
+ *  Nose, bottom left.
+ *
+ *  Value: "NOSE_BOTTOM_LEFT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomLeft;
+/**
+ *  Nose, bottom right.
+ *
+ *  Value: "NOSE_BOTTOM_RIGHT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomRight;
+/**
+ *  Nose tip.
+ *
+ *  Value: "NOSE_TIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseTip;
+/**
+ *  Right ear tragion.
+ *
+ *  Value: "RIGHT_EAR_TRAGION"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEarTragion;
+/**
+ *  Right eye.
+ *
+ *  Value: "RIGHT_EYE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEye;
+/**
+ *  Right eye, bottom boundary.
+ *
+ *  Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeBottomBoundary;
+/**
+ *  Right eyebrow, upper midpoint.
+ *
+ *  Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyebrowUpperMidpoint;
+/**
+ *  Right eye, left corner.
+ *
+ *  Value: "RIGHT_EYE_LEFT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeLeftCorner;
+/**
+ *  Right eye pupil.
+ *
+ *  Value: "RIGHT_EYE_PUPIL"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyePupil;
+/**
+ *  Right eye, right corner.
+ *
+ *  Value: "RIGHT_EYE_RIGHT_CORNER"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeRightCorner;
+/**
+ *  Right eye, top boundary.
+ *
+ *  Value: "RIGHT_EYE_TOP_BOUNDARY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeTopBoundary;
+/**
+ *  Right of left eyebrow.
+ *
+ *  Value: "RIGHT_OF_LEFT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightOfLeftEyebrow;
+/**
+ *  Right of right eyebrow.
+ *
+ *  Value: "RIGHT_OF_RIGHT_EYEBROW"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightOfRightEyebrow;
+/**
+ *  Unknown face landmark detected. Should not be filled.
+ *
+ *  Value: "UNKNOWN_LANDMARK"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_UnknownLandmark;
+/**
+ *  Upper lip.
+ *
+ *  Value: "UPPER_LIP"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_UpperLip;
+
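+// Usage sketch (editorial): picking the two pupil landmarks out of a face's
+// landmark list, e.g. to estimate inter-pupil distance. Hypothetical helper,
+// not part of the generated API:
+NS_INLINE BOOL GTLRVisionExample_IsPupilLandmark(NSString *landmarkType) {
+  return [landmarkType isEqualToString:kGTLRVision_Landmark_Type_LeftEyePupil] ||
+         [landmarkType isEqualToString:kGTLRVision_Landmark_Type_RightEyePupil];
+}
+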
+// ----------------------------------------------------------------------------
+// GTLRVision_OperationMetadata.state
+
+/**
+ *  The batch processing was cancelled.
+ *
+ *  Value: "CANCELLED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Cancelled;
+/**
+ *  Request is received.
+ *
+ *  Value: "CREATED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Created;
+/**
+ *  The batch processing is done.
+ *
+ *  Value: "DONE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Done;
+/**
+ *  Request is actively being processed.
+ *
+ *  Value: "RUNNING"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Running;
+/**
+ *  Invalid.
+ *
+ *  Value: "STATE_UNSPECIFIED"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_StateUnspecified;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_SafeSearchAnnotation.adult
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_SafeSearchAnnotation.medical
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_SafeSearchAnnotation.racy
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_SafeSearchAnnotation.spoof
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_VeryUnlikely;
+
+// ----------------------------------------------------------------------------
+// GTLRVision_SafeSearchAnnotation.violence
+
+/**
+ *  It is likely that the image belongs to the specified vertical.
+ *
+ *  Value: "LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Likely;
+/**
+ *  It is possible that the image belongs to the specified vertical.
+ *
+ *  Value: "POSSIBLE"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Possible;
+/**
+ *  Unknown likelihood.
+ *
+ *  Value: "UNKNOWN"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Unknown;
+/**
+ *  It is unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Unlikely;
+/**
+ *  It is very likely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_LIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_VeryLikely;
+/**
+ *  It is very unlikely that the image belongs to the specified vertical.
+ *
+ *  Value: "VERY_UNLIKELY"
+ */
+GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_VeryUnlikely;
+
+/**
+ *  Request message for the `AddProductToProductSet` method.
+ */
+@interface GTLRVision_AddProductToProductSetRequest : GTLRObject
+
+/**
+ *  The resource name for the Product to be added to this ProductSet.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *product;
+
+@end
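+
+// Usage sketch (illustrative comment, not generated API): populating the
+// request. PROJECT_ID, LOC_ID, and PRODUCT_ID are placeholders from the
+// format string above.
+//
+//   GTLRVision_AddProductToProductSetRequest *addRequest =
+//       [[GTLRVision_AddProductToProductSetRequest alloc] init];
+//   addRequest.product =
+//       @"projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID";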
+
+
+/**
+ *  Response to a single file annotation request. A file may contain one or more
+ *  images, which individually have their own responses.
+ */
+@interface GTLRVision_AnnotateFileResponse : GTLRObject
+
+/** Information about the file for which this response is generated. */
+@property(nonatomic, strong, nullable) GTLRVision_InputConfig *inputConfig;
+
+/** Individual responses to images found within the file. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageResponse *> *responses;
+
+@end
+
+
+/**
+ *  Request for performing Google Cloud Vision API tasks over a user-provided
+ *  image, with user-requested features.
+ */
+@interface GTLRVision_AnnotateImageRequest : GTLRObject
+
+/** Requested features. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Feature *> *features;
+
+/** The image to be processed. */
+@property(nonatomic, strong, nullable) GTLRVision_Image *image;
+
+/** Additional context that may accompany the image. */
+@property(nonatomic, strong, nullable) GTLRVision_ImageContext *imageContext;
+
+@end
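+
+// Usage sketch (illustrative, not part of the generated header): building a
+// single annotate request. `image` is assumed to be an already-configured
+// GTLRVision_Image; the feature-type constant is declared earlier in this
+// file.
+//
+//   GTLRVision_Feature *labelFeature = [[GTLRVision_Feature alloc] init];
+//   labelFeature.type = kGTLRVision_Feature_Type_LabelDetection;
+//   labelFeature.maxResults = @10;
+//
+//   GTLRVision_AnnotateImageRequest *request =
+//       [[GTLRVision_AnnotateImageRequest alloc] init];
+//   request.features = @[ labelFeature ];
+//   request.image = image;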
+
+
+/**
+ *  Response to an image annotation request.
+ */
+@interface GTLRVision_AnnotateImageResponse : GTLRObject
+
+/**
+ *  If present, contextual information needed to understand where this image
+ *  comes from.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_ImageAnnotationContext *context;
+
+/** If present, crop hints have completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_CropHintsAnnotation *cropHintsAnnotation;
+
+/**
+ *  If set, represents the error message for the operation.
+ *  Note that filled-in image annotations are guaranteed to be
+ *  correct, even when `error` is set.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Status *error;
+
+/** If present, face detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_FaceAnnotation *> *faceAnnotations;
+
+/**
+ *  If present, text (OCR) detection or document (OCR) text detection has
+ *  completed successfully.
+ *  This annotation provides the structural hierarchy for the OCR detected
+ *  text.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_TextAnnotation *fullTextAnnotation;
+
+/** If present, image properties were extracted successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_ImageProperties *imagePropertiesAnnotation;
+
+/** If present, label detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *labelAnnotations;
+
+/** If present, landmark detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *landmarkAnnotations;
+
+/**
+ *  If present, localized object detection has completed successfully.
+ *  This will be sorted descending by confidence score.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_LocalizedObjectAnnotation *> *localizedObjectAnnotations;
+
+/** If present, logo detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *logoAnnotations;
+
+/** If present, product search has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_ProductSearchResults *productSearchResults;
+
+/** If present, safe-search annotation has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_SafeSearchAnnotation *safeSearchAnnotation;
+
+/** If present, text (OCR) detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *textAnnotations;
+
+/** If present, web detection has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_WebDetection *webDetection;
+
+@end
+
+
+/**
+ *  An offline file annotation request.
+ */
+@interface GTLRVision_AsyncAnnotateFileRequest : GTLRObject
+
+/** Required. Requested features. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Feature *> *features;
+
+/** Additional context that may accompany the image(s) in the file. */
+@property(nonatomic, strong, nullable) GTLRVision_ImageContext *imageContext;
+
+/** Required. Information about the input file. */
+@property(nonatomic, strong, nullable) GTLRVision_InputConfig *inputConfig;
+
+/** Required. The desired output location and metadata (e.g. format). */
+@property(nonatomic, strong, nullable) GTLRVision_OutputConfig *outputConfig;
+
+@end
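+
+// Usage sketch (illustrative; assumes GTLRVision_InputConfig exposes
+// `gcsSource`/`mimeType` and GTLRVision_OutputConfig exposes
+// `gcsDestination`, matching the REST API). The gs:// paths are
+// placeholders, and `documentTextFeature` is assumed to be a
+// GTLRVision_Feature configured as in the AnnotateImageRequest sketch above.
+//
+//   GTLRVision_GcsSource *source = [[GTLRVision_GcsSource alloc] init];
+//   source.uri = @"gs://bucket-name/input.pdf";
+//   GTLRVision_InputConfig *inputConfig =
+//       [[GTLRVision_InputConfig alloc] init];
+//   inputConfig.gcsSource = source;
+//   inputConfig.mimeType = @"application/pdf";
+//
+//   GTLRVision_GcsDestination *destination =
+//       [[GTLRVision_GcsDestination alloc] init];
+//   destination.uri = @"gs://bucket-name/output/";
+//   GTLRVision_OutputConfig *outputConfig =
+//       [[GTLRVision_OutputConfig alloc] init];
+//   outputConfig.gcsDestination = destination;
+//
+//   GTLRVision_AsyncAnnotateFileRequest *fileRequest =
+//       [[GTLRVision_AsyncAnnotateFileRequest alloc] init];
+//   fileRequest.features = @[ documentTextFeature ];
+//   fileRequest.inputConfig = inputConfig;
+//   fileRequest.outputConfig = outputConfig;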
+
+
+/**
+ *  The response for a single offline file annotation request.
+ */
+@interface GTLRVision_AsyncAnnotateFileResponse : GTLRObject
+
+/** The output location and metadata from AsyncAnnotateFileRequest. */
+@property(nonatomic, strong, nullable) GTLRVision_OutputConfig *outputConfig;
+
+@end
+
+
+/**
+ *  Multiple async file annotation requests are batched into a single service
+ *  call.
+ */
+@interface GTLRVision_AsyncBatchAnnotateFilesRequest : GTLRObject
+
+/** Individual async file annotation requests for this batch. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_AsyncAnnotateFileRequest *> *requests;
+
+@end
+
+
+/**
+ *  Response to an async batch file annotation request.
+ */
+@interface GTLRVision_AsyncBatchAnnotateFilesResponse : GTLRObject
+
+/**
+ *  The list of file annotation responses, one for each request in
+ *  AsyncBatchAnnotateFilesRequest.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_AsyncAnnotateFileResponse *> *responses;
+
+@end
+
+
+/**
+ *  Multiple image annotation requests are batched into a single service call.
+ */
+@interface GTLRVision_BatchAnnotateImagesRequest : GTLRObject
+
+/** Individual image annotation requests for this batch. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageRequest *> *requests;
+
+@end
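+
+// Usage sketch (illustrative): wrapping individual requests into one batch
+// call. `request` is assumed to be a populated
+// GTLRVision_AnnotateImageRequest, e.g. from the sketch above.
+//
+//   GTLRVision_BatchAnnotateImagesRequest *batch =
+//       [[GTLRVision_BatchAnnotateImagesRequest alloc] init];
+//   batch.requests = @[ request ];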
+
+
+/**
+ *  Response to a batch image annotation request.
+ */
+@interface GTLRVision_BatchAnnotateImagesResponse : GTLRObject
+
+/** Individual responses to image annotation requests within the batch. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageResponse *> *responses;
+
+@end
+
+
+/**
+ *  Metadata for the batch operations, such as the current state.
+ *  This is included in the `metadata` field of the `Operation` returned by the
+ *  `GetOperation` call of the `google::longrunning::Operations` service.
+ */
+@interface GTLRVision_BatchOperationMetadata : GTLRObject
+
+/**
+ *  The time when the batch request is finished and
+ *  google.longrunning.Operation.done is set to true.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *endTime;
+
+/**
+ *  The current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_BatchOperationMetadata_State_Cancelled The request is
+ *        done after the longrunning.Operations.CancelOperation has
+ *        been called by the user. Any records that were processed before the
+ *        cancel command are output as specified in the request. (Value:
+ *        "CANCELLED")
+ *    @arg @c kGTLRVision_BatchOperationMetadata_State_Failed The request is
+ *        done and no item has been successfully processed. (Value: "FAILED")
+ *    @arg @c kGTLRVision_BatchOperationMetadata_State_Processing Request is
+ *        actively being processed. (Value: "PROCESSING")
+ *    @arg @c kGTLRVision_BatchOperationMetadata_State_StateUnspecified Invalid.
+ *        (Value: "STATE_UNSPECIFIED")
+ *    @arg @c kGTLRVision_BatchOperationMetadata_State_Successful The request is
+ *        done and at least one item has been successfully
+ *        processed. (Value: "SUCCESSFUL")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the batch request was submitted to the server. */
+@property(nonatomic, strong, nullable) GTLRDateTime *submitTime;
+
+@end
+
+
+/**
+ *  Logical element on the page.
+ */
+@interface GTLRVision_Block : GTLRObject
+
+/**
+ *  Detected block type (text, image, etc.) for this block.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_Block_BlockType_Barcode Barcode block. (Value:
+ *        "BARCODE")
+ *    @arg @c kGTLRVision_Block_BlockType_Picture Image block. (Value:
+ *        "PICTURE")
+ *    @arg @c kGTLRVision_Block_BlockType_Ruler Horizontal/vertical line box.
+ *        (Value: "RULER")
+ *    @arg @c kGTLRVision_Block_BlockType_Table Table block. (Value: "TABLE")
+ *    @arg @c kGTLRVision_Block_BlockType_Text Regular text block. (Value:
+ *        "TEXT")
+ *    @arg @c kGTLRVision_Block_BlockType_Unknown Unknown block type. (Value:
+ *        "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *blockType;
+
+/**
+ *  The bounding box for the block.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *      0----1
+ *      |    |
+ *      3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *      2----3
+ *      |    |
+ *      1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results on the block. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** List of paragraphs in this block (if this block is of type text). */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Paragraph *> *paragraphs;
+
+/** Additional information detected for the block. */
+@property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
+
+@end
+
+
+/**
+ *  A bounding polygon for the detected image annotation.
+ */
+@interface GTLRVision_BoundingPoly : GTLRObject
+
+/** The bounding polygon normalized vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_NormalizedVertex *> *normalizedVertices;
+
+/** The bounding polygon vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Vertex *> *vertices;
+
+@end
+
+
+/**
+ *  The request message for Operations.CancelOperation.
+ */
+@interface GTLRVision_CancelOperationRequest : GTLRObject
+@end
+
+
+/**
+ *  Represents a color in the RGBA color space. This representation is designed
+ *  for simplicity of conversion to/from color representations in various
+ *  languages over compactness; for example, the fields of this representation
+ *  can be trivially provided to the constructor of "java.awt.Color" in Java; it
+ *  can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha:"
+ *  method in iOS; and, with just a little work, it can be easily formatted into
+ *  a CSS "rgba()" string in JavaScript, as well. Here are some examples:
+ *  Example (Java):
+ *  import com.google.type.Color;
+ *  // ...
+ *  public static java.awt.Color fromProto(Color protocolor) {
+ *  float alpha = protocolor.hasAlpha()
+ *  ? protocolor.getAlpha().getValue()
+ *  : 1.0f;
+ *  return new java.awt.Color(
+ *  protocolor.getRed(),
+ *  protocolor.getGreen(),
+ *  protocolor.getBlue(),
+ *  alpha);
+ *  }
+ *  public static Color toProto(java.awt.Color color) {
+ *  float red = (float) color.getRed();
+ *  float green = (float) color.getGreen();
+ *  float blue = (float) color.getBlue();
+ *  float denominator = 255.0f;
+ *  Color.Builder resultBuilder =
+ *  Color
+ *  .newBuilder()
+ *  .setRed(red / denominator)
+ *  .setGreen(green / denominator)
+ *  .setBlue(blue / denominator);
+ *  int alpha = color.getAlpha();
+ *  if (alpha != 255) {
+ *  resultBuilder.setAlpha(
+ *  FloatValue
+ *  .newBuilder()
+ *  .setValue(((float) alpha) / denominator)
+ *  .build());
+ *  }
+ *  return resultBuilder.build();
+ *  }
+ *  // ...
+ *  Example (iOS / Obj-C):
+ *  // ...
+ *  static UIColor* fromProto(Color* protocolor) {
+ *  float red = [protocolor red];
+ *  float green = [protocolor green];
+ *  float blue = [protocolor blue];
+ *  FloatValue* alpha_wrapper = [protocolor alpha];
+ *  float alpha = 1.0;
+ *  if (alpha_wrapper != nil) {
+ *  alpha = [alpha_wrapper value];
+ *  }
+ *  return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
+ *  }
+ *  static Color* toProto(UIColor* color) {
+ *  CGFloat red, green, blue, alpha;
+ *  if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) {
+ *  return nil;
+ *  }
+ *  Color* result = [[Color alloc] init];
+ *  [result setRed:red];
+ *  [result setGreen:green];
+ *  [result setBlue:blue];
+ *  if (alpha <= 0.9999) {
+ *  [result setAlpha:floatWrapperWithValue(alpha)];
+ *  }
+ *  [result autorelease];
+ *  return result;
+ *  }
+ *  // ...
+ *  Example (JavaScript):
+ *  // ...
+ *  var protoToCssColor = function(rgb_color) {
+ *  var redFrac = rgb_color.red || 0.0;
+ *  var greenFrac = rgb_color.green || 0.0;
+ *  var blueFrac = rgb_color.blue || 0.0;
+ *  var red = Math.floor(redFrac * 255);
+ *  var green = Math.floor(greenFrac * 255);
+ *  var blue = Math.floor(blueFrac * 255);
+ *  if (!('alpha' in rgb_color)) {
+ *  return rgbToCssColor_(red, green, blue);
+ *  }
+ *  var alphaFrac = rgb_color.alpha.value || 0.0;
+ *  var rgbParams = [red, green, blue].join(',');
+ *  return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');
+ *  };
+ *  var rgbToCssColor_ = function(red, green, blue) {
+ *  var rgbNumber = new Number((red << 16) | (green << 8) | blue);
+ *  var hexString = rgbNumber.toString(16);
+ *  var missingZeros = 6 - hexString.length;
+ *  var resultBuilder = ['#'];
+ *  for (var i = 0; i < missingZeros; i++) {
+ *  resultBuilder.push('0');
+ *  }
+ *  resultBuilder.push(hexString);
+ *  return resultBuilder.join('');
+ *  };
+ *  // ...
+ */
+@interface GTLRVision_Color : GTLRObject
+
+/**
+ *  The fraction of this color that should be applied to the pixel. That is,
+ *  the final pixel color is defined by the equation:
+ *  pixel color = alpha * (this color) + (1.0 - alpha) * (background color)
+ *  This means that a value of 1.0 corresponds to a solid color, whereas
+ *  a value of 0.0 corresponds to a completely transparent color. This
+ *  uses a wrapper message rather than a simple float scalar so that it is
+ *  possible to distinguish between a default value and the value being unset.
+ *  If omitted, this color object is to be rendered as a solid color
+ *  (as if the alpha value had been explicitly given with a value of 1.0).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *alpha;
+
+/**
+ *  The amount of blue in the color as a value in the interval [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *blue;
+
+/**
+ *  The amount of green in the color as a value in the interval [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *green;
+
+/**
+ *  The amount of red in the color as a value in the interval [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *red;
+
+@end
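+
+// Conversion sketch mirroring the iOS example in the comment above; a local
+// helper, not generated API. Assumes UIKit is available. Per the `alpha`
+// documentation, an omitted alpha defaults to 1.0 (a solid color).
+//
+//   static UIColor *UIColorFromGTLRVisionColor(GTLRVision_Color *color) {
+//     CGFloat alpha = (color.alpha != nil) ? color.alpha.floatValue : 1.0;
+//     return [UIColor colorWithRed:color.red.floatValue
+//                            green:color.green.floatValue
+//                             blue:color.blue.floatValue
+//                            alpha:alpha];
+//   }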
+
+
+/**
+ *  Color information consists of RGB channels, score, and the fraction of
+ *  the image that the color occupies in the image.
+ */
+@interface GTLRVision_ColorInfo : GTLRObject
+
+/** RGB components of the color. */
+@property(nonatomic, strong, nullable) GTLRVision_Color *color;
+
+/**
+ *  The fraction of pixels the color occupies in the image.
+ *  Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pixelFraction;
+
+/**
+ *  Image-specific score for this color. Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Single crop hint that is used to generate a new crop when serving an image.
+ */
+@interface GTLRVision_CropHint : GTLRObject
+
+/**
+ *  The bounding polygon for the crop region. The coordinates of the bounding
+ *  box are in the original image's scale.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/**
+ *  Confidence of this being a salient region. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Fraction of importance of this salient region with respect to the original
+ *  image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *importanceFraction;
+
+@end
+
+
+/**
+ *  Set of crop hints that are used to generate new crops when serving images.
+ */
+@interface GTLRVision_CropHintsAnnotation : GTLRObject
+
+/** Crop hint results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_CropHint *> *cropHints;
+
+@end
+
+
+/**
+ *  Parameters for crop hints annotation request.
+ */
+@interface GTLRVision_CropHintsParams : GTLRObject
+
+/**
+ *  Aspect ratios in floats, representing the ratio of the width to the height
+ *  of the image. For example, if the desired aspect ratio is 4/3, the
+ *  corresponding float value should be 1.33333. If not specified, the
+ *  best possible crop is returned. The number of provided aspect ratios is
+ *  limited to a maximum of 16; any aspect ratios provided after the 16th are
+ *  ignored.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSArray<NSNumber *> *aspectRatios;
+
+@end
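+
+// Usage sketch (illustrative): requesting 4:3 and 16:9 crops. Each value is
+// width / height expressed as a float, per the property comment above.
+//
+//   GTLRVision_CropHintsParams *cropParams =
+//       [[GTLRVision_CropHintsParams alloc] init];
+//   cropParams.aspectRatios = @[ @1.33333f, @1.77778f ];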
+
+
+/**
+ *  Detected start or end of a structural component.
+ */
+@interface GTLRVision_DetectedBreak : GTLRObject
+
+/**
+ *  True if the break precedes the element.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *isPrefix;
+
+/**
+ *  Detected break type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_DetectedBreak_Type_EolSureSpace Line-wrapping break.
+ *        (Value: "EOL_SURE_SPACE")
+ *    @arg @c kGTLRVision_DetectedBreak_Type_Hyphen End-line hyphen that is not
+ *        present in text; does not co-occur with
+ *        `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
+ *    @arg @c kGTLRVision_DetectedBreak_Type_LineBreak Line break that ends a
+ *        paragraph. (Value: "LINE_BREAK")
+ *    @arg @c kGTLRVision_DetectedBreak_Type_Space Regular space. (Value:
+ *        "SPACE")
+ *    @arg @c kGTLRVision_DetectedBreak_Type_SureSpace Sure space (very wide).
+ *        (Value: "SURE_SPACE")
+ *    @arg @c kGTLRVision_DetectedBreak_Type_Unknown Unknown break label type.
+ *        (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  Detected language for a structural component.
+ */
+@interface GTLRVision_DetectedLanguage : GTLRObject
+
+/**
+ *  Confidence of detected language. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Set of dominant colors and their corresponding scores.
+ */
+@interface GTLRVision_DominantColorsAnnotation : GTLRObject
+
+/** RGB color values with their score and pixel fraction. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_ColorInfo *> *colors;
+
+@end
+
+
+/**
+ *  A generic empty message that you can re-use to avoid defining duplicated
+ *  empty messages in your APIs. A typical example is to use it as the request
+ *  or the response type of an API method. For instance:
+ *  service Foo {
+ *  rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ *  }
+ *  The JSON representation for `Empty` is an empty JSON object `{}`.
+ */
+@interface GTLRVision_Empty : GTLRObject
+@end
+
+
+/**
+ *  Set of detected entity features.
+ */
+@interface GTLRVision_EntityAnnotation : GTLRObject
+
+/**
+ *  Image region to which this entity belongs. Not produced
+ *  for `LABEL_DETECTION` features.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/**
+ *  **Deprecated. Use `score` instead.**
+ *  The accuracy of the entity detection in an image.
+ *  For example, for an image in which the "Eiffel Tower" entity is detected,
+ *  this field represents the confidence that there is a tower in the query
+ *  image. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Entity textual description, expressed in its `locale` language.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The language code for the locale in which the entity textual
+ *  `description` is expressed.
+ */
+@property(nonatomic, copy, nullable) NSString *locale;
+
+/**
+ *  The location information for the detected entity. Multiple
+ *  `LocationInfo` elements can be present because one location may
+ *  indicate the location of the scene in the image, and another location
+ *  may indicate the location of the place where the image was taken.
+ *  Location information is usually present for landmarks.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_LocationInfo *> *locations;
+
+/**
+ *  Opaque entity ID. Some IDs may be available in
+ *  [Google Knowledge Graph Search
+ *  API](https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/**
+ *  Some entities may have optional user-supplied `Property` (name/value)
+ *  fields, such as a score or string that qualifies the entity.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Property *> *properties;
+
+/**
+ *  Overall score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/**
+ *  The relevancy of the ICA (Image Content Annotation) label to the
+ *  image. For example, the relevancy of "tower" is likely higher to an image
+ *  containing the detected "Eiffel Tower" than to an image containing a
+ *  detected distant towering building, even though the confidence that
+ *  there is a tower in each image may be the same. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *topicality;
+
+@end
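+
+// Reading sketch (illustrative): iterating label results. `response` is
+// assumed to be a GTLRVision_AnnotateImageResponse. Note the entity
+// description is surfaced as `descriptionProperty`, per the remapping above.
+//
+//   for (GTLRVision_EntityAnnotation *label in response.labelAnnotations) {
+//     NSLog(@"%@ (score %.2f)", label.descriptionProperty,
+//           label.score.floatValue);
+//   }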
+
+
+/**
+ *  A face annotation object contains the results of face detection.
+ */
+@interface GTLRVision_FaceAnnotation : GTLRObject
+
+/**
+ *  Anger likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Likely It is likely
+ *        that the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *angerLikelihood;
+
+/**
+ *  Blurred likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Likely It is likely
+ *        that the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Possible It is
+ *        possible that the image belongs to the specified vertical. (Value:
+ *        "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Unlikely It is
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryUnlikely It is
+ *        very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *blurredLikelihood;
+
+/**
+ *  The bounding polygon around the face. The coordinates of the bounding box
+ *  are in the original image's scale.
+ *  The bounding box is computed to "frame" the face in accordance with human
+ *  expectations. It is based on the landmarker results.
+ *  Note that one or more x and/or y coordinates may not be generated in the
+ *  `BoundingPoly` (the polygon will be unbounded) if only a partial face
+ *  appears in the image to be annotated.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/**
+ *  Detection confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
+
+/**
+ *  The `fd_bounding_poly` bounding polygon is tighter than the
+ *  `boundingPoly`, and encloses only the skin part of the face. Typically, it
+ *  is used to eliminate the face from any image analysis that detects the
+ *  "amount of skin" visible in an image. It is not based on the
+ *  landmarker results, only on the initial face detection, hence
+ *  the <code>fd</code> (face detection) prefix.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *fdBoundingPoly;
+
+/**
+ *  Headwear likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Likely It is likely
+ *        that the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Possible It is
+ *        possible that the image belongs to the specified vertical. (Value:
+ *        "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unlikely It is
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryLikely It is
+ *        very likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryUnlikely It is
+ *        very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *headwearLikelihood;
+
+/**
+ *  Joy likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Likely It is likely that
+ *        the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *joyLikelihood;
+
+/**
+ *  Face landmarking confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
+
+/** Detected face landmarks. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Landmark *> *landmarks;
+
+/**
+ *  Yaw angle, which indicates the leftward/rightward angle that the face is
+ *  pointing relative to the vertical plane perpendicular to the image. Range
+ *  [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *panAngle;
+
+/**
+ *  Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
+ *  of the face relative to the image vertical about the axis perpendicular to
+ *  the face. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *rollAngle;
+
+/**
+ *  Sorrow likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Likely It is likely
+ *        that the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Possible It is
+ *        possible that the image belongs to the specified vertical. (Value:
+ *        "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Unlikely It is
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryUnlikely It is
+ *        very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
+
+/**
+ *  Surprise likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Likely It is likely
+ *        that the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Possible It is
+ *        possible that the image belongs to the specified vertical. (Value:
+ *        "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unlikely It is
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryLikely It is
+ *        very likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryUnlikely It is
+ *        very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
+
+/**
+ *  Pitch angle, which indicates the upwards/downwards angle that the face is
+ *  pointing relative to the image's horizontal plane. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *tiltAngle;
+
+/**
+ *  Under-exposed likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Likely It is
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Possible It is
+ *        possible that the image belongs to the specified vertical. (Value:
+ *        "POSSIBLE")
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unlikely It is
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "UNLIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryLikely It is
+ *        very likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryUnlikely It
+ *        is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
+
+@end
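+
+// Reading sketch (illustrative): likelihood fields are NSString values that
+// compare against the k...Likelihood constants declared earlier in this
+// file. `response` is assumed to be a GTLRVision_AnnotateImageResponse.
+//
+//   for (GTLRVision_FaceAnnotation *face in response.faceAnnotations) {
+//     BOOL isJoyful = [face.joyLikelihood
+//         isEqualToString:kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely];
+//     NSLog(@"joyful: %d (pan %.1f)", isJoyful, face.panAngle.floatValue);
+//   }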
+
+
+/**
+ *  The type of Google Cloud Vision API detection to perform, and the maximum
+ *  number of results to return for that type. Multiple `Feature` objects can
+ *  be specified in the `features` list.
+ */
+@interface GTLRVision_Feature : GTLRObject
+
+/**
+ *  Maximum number of results of this type. Does not apply to
+ *  `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *maxResults;
+
+/**
+ *  Model to use for the feature.
+ *  Supported values: "builtin/stable" (the default if unset) and
+ *  "builtin/latest".
+ */
+@property(nonatomic, copy, nullable) NSString *model;
+
+/**
+ *  The feature type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_Feature_Type_CropHints Run crop hints. (Value:
+ *        "CROP_HINTS")
+ *    @arg @c kGTLRVision_Feature_Type_DocumentTextDetection Run dense text
+ *        document OCR. Takes precedence when both
+ *        `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. (Value:
+ *        "DOCUMENT_TEXT_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_FaceDetection Run face detection. (Value:
+ *        "FACE_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_ImageProperties Compute a set of image
+ *        properties, such as the
+ *        image's dominant colors. (Value: "IMAGE_PROPERTIES")
+ *    @arg @c kGTLRVision_Feature_Type_LabelDetection Run label detection.
+ *        (Value: "LABEL_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_LandmarkDetection Run landmark detection.
+ *        (Value: "LANDMARK_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_LogoDetection Run logo detection. (Value:
+ *        "LOGO_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_ObjectLocalization Run localizer for
+ *        object detection. (Value: "OBJECT_LOCALIZATION")
+ *    @arg @c kGTLRVision_Feature_Type_ProductSearch Run Product Search. (Value:
+ *        "PRODUCT_SEARCH")
+ *    @arg @c kGTLRVision_Feature_Type_SafeSearchDetection Run Safe Search to
+ *        detect potentially unsafe
+ *        or undesirable content. (Value: "SAFE_SEARCH_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_TextDetection Run text detection /
+ *        optical character recognition (OCR). Text detection
+ *        is optimized for areas of text within a larger image; if the image is
+ *        a document, use `DOCUMENT_TEXT_DETECTION` instead. (Value:
+ *        "TEXT_DETECTION")
+ *    @arg @c kGTLRVision_Feature_Type_TypeUnspecified Unspecified feature type.
+ *        (Value: "TYPE_UNSPECIFIED")
+ *    @arg @c kGTLRVision_Feature_Type_WebDetection Run web detection. (Value:
+ *        "WEB_DETECTION")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
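+
+// Usage sketch (illustrative): one Feature per detection type; the type
+// constants are declared earlier in this file.
+//
+//   GTLRVision_Feature *textFeature = [[GTLRVision_Feature alloc] init];
+//   textFeature.type = kGTLRVision_Feature_Type_DocumentTextDetection;
+//   textFeature.model = @"builtin/stable";
+//   // maxResults does not apply to DOCUMENT_TEXT_DETECTION (see above).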
+
+
+/**
+ *  The Google Cloud Storage location where the output will be written to.
+ */
+@interface GTLRVision_GcsDestination : GTLRObject
+
+/**
+ *  Google Cloud Storage URI where the results will be stored. Results will
+ *  be in JSON format and preceded by their corresponding input URI. This field
+ *  can either represent a single file, or a prefix for multiple outputs.
+ *  Prefixes must end in a `/`.
+ *  Examples:
+ *  * File: gs://bucket-name/filename.json
+ *  * Prefix: gs://bucket-name/prefix/here/
+ *  * File: gs://bucket-name/prefix/here
+ *  If there are multiple outputs, each response is still an
+ *  AnnotateFileResponse, and each contains some subset of the full list of
+ *  AnnotateImageResponse.
+ *  Multiple outputs can happen if, for example, the output JSON is too large
+ *  and overflows into multiple sharded files.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  The Google Cloud Storage location where the input will be read from.
+ */
+@interface GTLRVision_GcsSource : GTLRObject
+
+/**
+ *  Google Cloud Storage URI for the input file. This must only be a
+ *  Google Cloud Storage object. Wildcards are not currently supported.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Response to a single file annotation request. A file may contain one or more
+ *  images, which individually have their own responses.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1AnnotateFileResponse : GTLRObject
+
+/** Information about the file for which this response is generated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1InputConfig *inputConfig;
+
+/** Individual responses to images found within the file. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse *> *responses;
+
+@end
+
+
+/**
+ *  Response to an image annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse : GTLRObject
+
+/**
+ *  If present, contextual information needed to understand where this image
+ *  comes from.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext *context;
+
+/** If present, crop hints have completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation *cropHintsAnnotation;
+
+/**
+ *  If set, represents the error message for the operation.
+ *  Note that filled-in image annotations are guaranteed to be
+ *  correct, even when `error` is set.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Status *error;
+
+/** If present, face detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation *> *faceAnnotations;
+
+/**
+ *  If present, text (OCR) detection or document (OCR) text detection has
+ *  completed successfully.
+ *  This annotation provides the structural hierarchy for the OCR detected
+ *  text.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation *fullTextAnnotation;
+
+/** If present, image properties were extracted successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties *imagePropertiesAnnotation;
+
+/** If present, label detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *labelAnnotations;
+
+/** If present, landmark detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *landmarkAnnotations;
+
+/**
+ *  If present, localized object detection has completed successfully.
+ *  This will be sorted descending by confidence score.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
+
+/** If present, logo detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *logoAnnotations;
+
+/** If present, product search has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults *productSearchResults;
+
+/** If present, safe-search annotation has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation *safeSearchAnnotation;
+
+/** If present, text (OCR) detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *textAnnotations;
+
+/** If present, web detection has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1WebDetection *webDetection;
+
+@end
+
+
+/**
+ *  The response for a single offline file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse : GTLRObject
+
+/** The output location and metadata from AsyncAnnotateFileRequest. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig *outputConfig;
+
+@end
+
+
+/**
+ *  Response to an async batch file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1AsyncBatchAnnotateFilesResponse : GTLRObject
+
+/**
+ *  The list of file annotation responses, one for each request in
+ *  AsyncBatchAnnotateFilesRequest.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse *> *responses;
+
+@end
+
+
+/**
+ *  Logical element on the page.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Block : GTLRObject
+
+/**
+ *  Detected block type (text, image, etc.) for this block.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Barcode
+ *        Barcode block. (Value: "BARCODE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Picture
+ *        Image block. (Value: "PICTURE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Ruler
+ *        Horizontal/vertical line box. (Value: "RULER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Table Table
+ *        block. (Value: "TABLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Text Regular
+ *        text block. (Value: "TEXT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Unknown
+ *        Unknown block type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *blockType;
+
+/**
+ *  The bounding box for the block.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *      0----1
+ *      |    |
+ *      3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *      2----3
+ *      |    |
+ *      1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results on the block. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** List of paragraphs in this block (if this block is of type text). */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Paragraph *> *paragraphs;
+
+/** Additional information detected for the block. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
+
+@end
+
+
+/**
+ *  A bounding polygon for the detected image annotation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly : GTLRObject
+
+/** The bounding polygon normalized vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex *> *normalizedVertices;
+
+/** The bounding polygon vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Vertex *> *vertices;
+
+@end
+
+
+/**
+ *  Color information consists of RGB channels, score, and the fraction of
+ *  the image that the color occupies in the image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo : GTLRObject
+
+/** RGB components of the color. */
+@property(nonatomic, strong, nullable) GTLRVision_Color *color;
+
+/**
+ *  The fraction of pixels the color occupies in the image.
+ *  Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pixelFraction;
+
+/**
+ *  Image-specific score for this color. Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Single crop hint that is used to generate a new crop when serving an image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1CropHint : GTLRObject
+
+/**
+ *  The bounding polygon for the crop region. The coordinates of the bounding
+ *  box are in the original image's scale.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Confidence of this being a salient region. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Fraction of importance of this salient region with respect to the original
+ *  image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *importanceFraction;
+
+@end
+
+
+/**
+ *  Set of crop hints that are used to generate new crops when serving images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation : GTLRObject
+
+/** Crop hint results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1CropHint *> *cropHints;
+
+@end
+
+
+/**
+ *  Set of dominant colors and their corresponding scores.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation : GTLRObject
+
+/** RGB color values with their score and pixel fraction. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo *> *colors;
+
+@end
+
+
+/**
+ *  Set of detected entity features.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation : GTLRObject
+
+/**
+ *  Image region to which this entity belongs. Not produced
+ *  for `LABEL_DETECTION` features.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
+
+/**
+ *  **Deprecated. Use `score` instead.**
+ *  The accuracy of the entity detection in an image.
+ *  For example, for an image in which the "Eiffel Tower" entity is detected,
+ *  this field represents the confidence that there is a tower in the query
+ *  image. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Entity textual description, expressed in its `locale` language.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The language code for the locale in which the entity textual
+ *  `description` is expressed.
+ */
+@property(nonatomic, copy, nullable) NSString *locale;
+
+/**
+ *  The location information for the detected entity. Multiple
+ *  `LocationInfo` elements can be present because one location may
+ *  indicate the location of the scene in the image, and another location
+ *  may indicate the location of the place where the image was taken.
+ *  Location information is usually present for landmarks.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo *> *locations;
+
+/**
+ *  Opaque entity ID. Some IDs may be available in
+ *  [Google Knowledge Graph Search
+ *  API](https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/**
+ *  Some entities may have optional user-supplied `Property` (name/value)
+ *  fields, such as a score or string that qualifies the entity.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Property *> *properties;
+
+/**
+ *  Overall score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/**
+ *  The relevancy of the ICA (Image Content Annotation) label to the
+ *  image. For example, the relevancy of "tower" is likely higher to an image
+ *  containing the detected "Eiffel Tower" than to an image containing a
+ *  detected distant towering building, even though the confidence that
+ *  there is a tower in each image may be the same. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *topicality;
+
+@end
+
+
+/**
+ *  A face annotation object contains the results of face detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation : GTLRObject
+
+/**
+ *  Anger likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *angerLikelihood;
+
+/**
+ *  Blurred likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *blurredLikelihood;
+
+/**
+ *  The bounding polygon around the face. The coordinates of the bounding box
+ *  are in the original image's scale.
+ *  The bounding box is computed to "frame" the face in accordance with human
+ *  expectations. It is based on the landmarker results.
+ *  Note that one or more x and/or y coordinates may not be generated in the
+ *  `BoundingPoly` (the polygon will be unbounded) if only a partial face
+ *  appears in the image to be annotated.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Detection confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
+
+/**
+ *  The `fd_bounding_poly` bounding polygon is tighter than the
+ *  `boundingPoly`, and encloses only the skin part of the face. Typically, it
+ *  is used to eliminate the face from any image analysis that detects the
+ *  "amount of skin" visible in an image. It is not based on the
+ *  landmarker results, only on the initial face detection, hence
+ *  the <code>fd</code> (face detection) prefix.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *fdBoundingPoly;
+
+/**
+ *  Headwear likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *headwearLikelihood;
+
+/**
+ *  Joy likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *joyLikelihood;
+
+/**
+ *  Face landmarking confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
+
+/** Detected face landmarks. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark *> *landmarks;
+
+/**
+ *  Yaw angle, which indicates the leftward/rightward angle that the face is
+ *  pointing relative to the vertical plane perpendicular to the image. Range
+ *  [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *panAngle;
+
+/**
+ *  Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
+ *  of the face relative to the image vertical about the axis perpendicular to
+ *  the face. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *rollAngle;
+
+/**
+ *  Sorrow likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
+
+/**
+ *  Surprise likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
+
+/**
+ *  Pitch angle, which indicates the upwards/downwards angle that the face is
+ *  pointing relative to the image's horizontal plane. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *tiltAngle;
+
+/**
+ *  Under-exposed likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
+
+@end
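+
+// Usage sketch (illustrative, not part of the generated surface): reading a
+// parsed face annotation. `face` is a hypothetical
+// GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation from a response.
+//
+//   if ([face.joyLikelihood
+//           isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryLikely]) {
+//     NSLog(@"Joyful face (confidence %.2f) at pan %.1f, roll %.1f, tilt %.1f",
+//           face.detectionConfidence.floatValue, face.panAngle.floatValue,
+//           face.rollAngle.floatValue, face.tiltAngle.floatValue);
+//   }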
+
+
+/**
+ *  A face-specific landmark (for example, a face feature).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark : GTLRObject
+
+/** Face landmark position. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1Position *position;
+
+/**
+ *  Face landmark type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinGnathion
+ *        Chin gnathion. (Value: "CHIN_GNATHION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinLeftGonion
+ *        Chin left gonion. (Value: "CHIN_LEFT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinRightGonion
+ *        Chin right gonion. (Value: "CHIN_RIGHT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ForeheadGlabella
+ *        Forehead glabella. (Value: "FOREHEAD_GLABELLA")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEarTragion
+ *        Left ear tragion. (Value: "LEFT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEye
+ *        Left eye. (Value: "LEFT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
+ *        Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
+ *        Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
+ *        Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyePupil
+ *        Left eye pupil. (Value: "LEFT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
+ *        Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
+ *        Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
+ *        Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
+ *        Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LowerLip
+ *        Lower lip. (Value: "LOWER_LIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
+ *        Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthCenter
+ *        Mouth center. (Value: "MOUTH_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthLeft
+ *        Mouth left. (Value: "MOUTH_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthRight
+ *        Mouth right. (Value: "MOUTH_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomCenter
+ *        Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomLeft
+ *        Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomRight
+ *        Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip
+ *        Nose tip. (Value: "NOSE_TIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEarTragion
+ *        Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEye
+ *        Right eye. (Value: "RIGHT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
+ *        Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
+ *        Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
+ *        Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyePupil
+ *        Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
+ *        Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
+ *        Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
+ *        Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
+ *        Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UnknownLandmark
+ *        Unknown face landmark detected. Should not be filled. (Value:
+ *        "UNKNOWN_LANDMARK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UpperLip
+ *        Upper lip. (Value: "UPPER_LIP")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
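+
+// Usage sketch (illustrative only): locating one landmark type in the
+// `landmarks` array of a face annotation (`face` is hypothetical).
+//
+//   for (GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark *lm in face.landmarks) {
+//     if ([lm.type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip]) {
+//       NSLog(@"Nose tip at (%.1f, %.1f, %.1f)", lm.position.x.floatValue,
+//             lm.position.y.floatValue, lm.position.z.floatValue);
+//     }
+//   }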
+
+
+/**
+ *  The Google Cloud Storage location where the output will be written to.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination : GTLRObject
+
+/**
+ *  Google Cloud Storage URI where the results will be stored. Results will
+ *  be in JSON format and preceded by its corresponding input URI. This field
+ *  can either represent a single file, or a prefix for multiple outputs.
+ *  Prefixes must end in a `/`.
+ *  Examples:
+ *  * File: gs://bucket-name/filename.json
+ *  * Prefix: gs://bucket-name/prefix/here/
+ *  * File: gs://bucket-name/prefix/here
+ *  If there are multiple outputs, each response is still an
+ *  AnnotateFileResponse, each of which contains some subset of the full
+ *  list of AnnotateImageResponse.
+ *  Multiple outputs can happen if, for example, the output JSON is too large
+ *  and overflows into multiple sharded files.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  The Google Cloud Storage location where the input will be read from.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1GcsSource : GTLRObject
+
+/**
+ *  Google Cloud Storage URI for the input file. This must only be a
+ *  Google Cloud Storage object. Wildcards are not currently supported.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  If an image was produced from a file (e.g. a PDF), this message gives
+ *  information about the source of that image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext : GTLRObject
+
+/**
+ *  If the file was a PDF or TIFF, this field gives the page number within
+ *  the file used to produce the image.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pageNumber;
+
+/** The URI of the file used to produce the image. */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Stores image properties, such as dominant colors.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties : GTLRObject
+
+/** If present, dominant colors completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation *dominantColors;
+
+@end
+
+
+/**
+ *  The desired input location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1InputConfig : GTLRObject
+
+/** The Google Cloud Storage location to read the input from. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1GcsSource *gcsSource;
+
+/**
+ *  The type of the file. Currently only "application/pdf" and "image/tiff"
+ *  are supported. Wildcards are not supported.
+ */
+@property(nonatomic, copy, nullable) NSString *mimeType;
+
+@end
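+
+// Usage sketch (illustrative only; the bucket and object names are made up):
+// building an input config for a PDF stored on Google Cloud Storage.
+// GTLRObject subclasses are instantiated with the inherited +object method.
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1GcsSource *source =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1GcsSource object];
+//   source.uri = @"gs://my-bucket/docs/input.pdf";
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1InputConfig *input =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1InputConfig object];
+//   input.gcsSource = source;
+//   input.mimeType = @"application/pdf";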
+
+
+/**
+ *  Set of detected objects with bounding boxes.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation : GTLRObject
+
+/** Image region to which this object belongs. This must be populated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+/** Object ID that should align with EntityAnnotation mid. */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/** Object name, expressed in its `language_code` language. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
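+
+// Usage sketch (illustrative only): reading a localized object annotation
+// (`obj` is hypothetical; assumes the BoundingPoly carries normalized
+// vertices for this feature).
+//
+//   NSLog(@"%@ (score %.2f)", obj.name, obj.score.floatValue);
+//   for (GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex *v in
+//        obj.boundingPoly.normalizedVertices) {
+//     NSLog(@"  vertex (%.3f, %.3f)", v.x.floatValue, v.y.floatValue);
+//   }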
+
+
+/**
+ *  Detected entity location information.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo : GTLRObject
+
+/** Lat/long location coordinates. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the normalized vertex coordinates are relative to the original image
+ *  and range from 0 to 1.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
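+
+// Usage sketch (illustrative only): converting a normalized vertex back to
+// pixel coordinates. `imageWidth` and `imageHeight` are assumed to be the
+// original image dimensions, known to the caller.
+//
+//   CGFloat px = vertex.x.floatValue * imageWidth;
+//   CGFloat py = vertex.y.floatValue * imageHeight;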
+
+
+/**
+ *  Contains metadata for the BatchAnnotateImages operation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata : GTLRObject
+
+/** The time when the batch request was received. */
+@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
+
+/**
+ *  Current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Cancelled
+ *        The batch processing was cancelled. (Value: "CANCELLED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Created
+ *        Request is received. (Value: "CREATED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Done
+ *        The batch processing is done. (Value: "DONE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Running
+ *        Request is actively being processed. (Value: "RUNNING")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_StateUnspecified
+ *        Invalid. (Value: "STATE_UNSPECIFIED")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the operation result was last updated. */
+@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
+
+@end
+
+
+/**
+ *  The desired output location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig : GTLRObject
+
+/**
+ *  The max number of response protos to put into each output JSON file on
+ *  Google Cloud Storage.
+ *  The valid range is [1, 100]. If not specified, the default value is 20.
+ *  For example, for one PDF file with 100 pages, 100 response protos will
+ *  be generated. If `batch_size` = 20, then 5 JSON files each
+ *  containing 20 response protos will be written under the prefix
+ *  `gcs_destination`.`uri`.
+ *  Currently, batch_size only applies to GcsDestination, with potential future
+ *  support for other output configurations.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *batchSize;
+
+/** The Google Cloud Storage location to write the output(s) to. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination *gcsDestination;
+
+@end
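+
+// Usage sketch (illustrative only; the bucket name is made up): sharded JSON
+// output under a GCS prefix. With `batchSize` = 20, a 100-page PDF yields
+// 5 output files of 20 responses each.
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination *dest =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination object];
+//   dest.uri = @"gs://my-bucket/vision-output/";  // a prefix must end in `/`
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig *output =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig object];
+//   output.gcsDestination = dest;
+//   output.batchSize = @20;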
+
+
+/**
+ *  Detected page from OCR.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Page : GTLRObject
+
+/** List of blocks of text, images, etc. on this page. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Block *> *blocks;
+
+/**
+ *  Confidence of the OCR results on the page. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Page height. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *height;
+
+/** Additional information detected on the page. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
+
+/**
+ *  Page width. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *width;
+
+@end
+
+
+/**
+ *  Structural unit of text representing a number of words in certain order.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Paragraph : GTLRObject
+
+/**
+ *  The bounding box for the paragraph.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the paragraph. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the paragraph. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
+
+/** List of words in this paragraph. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Word *> *words;
+
+@end
+
+
+/**
+ *  A 3D position in the image, used primarily for Face detection landmarks.
+ *  A valid Position must have both x and y coordinates.
+ *  The position coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Position : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+/**
+ *  Z coordinate (or depth).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *z;
+
+@end
+
+
+/**
+ *  A Product contains ReferenceImages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Product : GTLRObject
+
+/**
+ *  User-provided metadata to be stored with this product. Must be at most 4096
+ *  characters long.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The user-provided name for this Product. Must not be empty. Must be at most
+ *  4096 characters long.
+ */
+@property(nonatomic, copy, nullable) NSString *displayName;
+
+/**
+ *  The resource name of the product.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *  This field is ignored when creating a product.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The category for the product identified by the reference image. This should
+ *  be either "homegoods", "apparel", or "toys".
+ *  This field is immutable.
+ */
+@property(nonatomic, copy, nullable) NSString *productCategory;
+
+/**
+ *  Key-value pairs that can be attached to a product. At query time,
+ *  constraints can be specified based on the product_labels.
+ *  Note that integer values can be provided as strings, e.g. "1199". Only
+ *  strings with integer values can match a range-based restriction, which
+ *  will be supported soon.
+ *  Multiple values can be assigned to the same key. One product may have up to
+ *  100 product_labels.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue *> *productLabels;
+
+@end
+
+
+/**
+ *  A product label represented as a key-value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue : GTLRObject
+
+/**
+ *  The key of the label attached to the product. Cannot be empty and cannot
+ *  exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *key;
+
+/**
+ *  The value of the label attached to the product. Cannot be empty and
+ *  cannot exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
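+
+// Usage sketch (illustrative only; names and values are made up): attaching a
+// product label before creating the product.
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue *label =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue object];
+//   label.key = @"style";
+//   label.value = @"sneaker";
+//
+//   GTLRVision_GoogleCloudVisionV1p1beta1Product *product =
+//       [GTLRVision_GoogleCloudVisionV1p1beta1Product object];
+//   product.displayName = @"Runner 2000";
+//   product.productCategory = @"apparel";
+//   product.productLabels = @[ label ];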
+
+
+/**
+ *  Results for a product search request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults : GTLRObject
+
+/**
+ *  Timestamp of the index which provided these results. Changes made after
+ *  this time are not reflected in the current results.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
+
+/**
+ *  List of results grouped by products detected in the query image. Each entry
+ *  corresponds to one bounding polygon in the query image, and contains the
+ *  matching products specific to that region. There may be duplicate product
+ *  matches in the union of all the per-product results.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about the products similar to a single product in a query
+ *  image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult : GTLRObject
+
+/** The bounding polygon around the product detected in the query image. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about a product.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult : GTLRObject
+
+/**
+ *  The resource name of the image from the product that is the closest match
+ *  to the query.
+ */
+@property(nonatomic, copy, nullable) NSString *image;
+
+/** The Product. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1Product *product;
+
+/**
+ *  A confidence level on the match, ranging from 0 (no confidence) to
+ *  1 (full confidence).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  A `Property` consists of a user-supplied name/value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Property : GTLRObject
+
+/** Name of the property. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Value of numeric properties.
+ *
+ *  Uses NSNumber of unsignedLongLongValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *uint64Value;
+
+/** Value of the property. */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  Set of features pertaining to the image, computed by computer vision
+ *  methods over safe-search verticals (for example, adult, spoof, medical,
+ *  violence).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation : GTLRObject
+
+/**
+ *  Represents the adult content likelihood for the image. Adult content may
+ *  contain elements such as nudity, pornographic images or cartoons, or
+ *  sexual activities.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *adult;
+
+/**
+ *  Likelihood that this is a medical image.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *medical;
+
+/**
+ *  Likelihood that the request image contains racy content. Racy content may
+ *  include (but is not limited to) skimpy or sheer clothing, strategically
+ *  covered nudity, lewd or provocative poses, or close-ups of sensitive
+ *  body areas.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *racy;
+
+/**
+ *  Spoof likelihood. The likelihood that a modification
+ *  was made to the image's canonical version to make it appear
+ *  funny or offensive.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *spoof;
+
+/**
+ *  Likelihood that this image contains violent content.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *violence;
+
+@end
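+
+// Usage sketch (illustrative only): a conservative screen on two safe-search
+// verticals (`safeSearch` is a hypothetical parsed annotation).
+//
+//   BOOL shouldBlock =
+//       [safeSearch.adult isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely] ||
+//       [safeSearch.violence isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely];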
+
+
+/**
+ *  A single symbol representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Symbol : GTLRObject
+
+/**
+ *  The bounding box for the symbol.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the symbol. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the symbol. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
+
+/** The actual UTF-8 representation of the symbol. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
+
+
+/**
+ *  TextAnnotation contains a structured representation of OCR extracted text.
+ *  The hierarchy of an OCR extracted text structure is like this:
+ *  TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
+ *  Each structural component, starting from Page, may further have its own
+ *  properties. Properties describe detected languages, breaks, etc. Please
+ *  refer to the TextAnnotation.TextProperty message definition below for
+ *  more detail.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation : GTLRObject
+
+/** List of pages detected by OCR. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Page *> *pages;
+
+/** UTF-8 text detected on the pages. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
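+
+// Usage sketch (illustrative only): walking the OCR hierarchy
+// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol to rebuild
+// each word (`annotation` is a hypothetical parsed result).
+//
+//   for (GTLRVision_GoogleCloudVisionV1p1beta1Page *page in annotation.pages) {
+//     for (GTLRVision_GoogleCloudVisionV1p1beta1Block *block in page.blocks) {
+//       for (GTLRVision_GoogleCloudVisionV1p1beta1Paragraph *para in block.paragraphs) {
+//         for (GTLRVision_GoogleCloudVisionV1p1beta1Word *word in para.words) {
+//           NSMutableString *text = [NSMutableString string];
+//           for (GTLRVision_GoogleCloudVisionV1p1beta1Symbol *symbol in word.symbols) {
+//             [text appendString:symbol.text ?: @""];
+//           }
+//           NSLog(@"word: %@ (confidence %.2f)", text, word.confidence.floatValue);
+//         }
+//       }
+//     }
+//   }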
+
+
+/**
+ *  Detected start or end of a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak : GTLRObject
+
+/**
+ *  True if break prepends the element.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *isPrefix;
+
+/**
+ *  Detected break type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_EolSureSpace
+ *        Line-wrapping break. (Value: "EOL_SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Hyphen
+ *        End-line hyphen that is not present in text; does not co-occur with
+ *        `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_LineBreak
+ *        Line break that ends a paragraph. (Value: "LINE_BREAK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Space
+ *        Regular space. (Value: "SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_SureSpace
+ *        Sure space (very wide). (Value: "SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Unknown
+ *        Unknown break label type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  Detected language for a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage : GTLRObject
+
+/**
+ *  Confidence of detected language. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Additional information detected on the structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty : GTLRObject
+
+/** Detected start or end of a text segment. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak *detectedBreak;
+
+/** A list of detected languages together with confidence. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the vertex coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Vertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
+
+
+/**
+ *  Relevant information for the image from the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetection : GTLRObject
+
+/**
+ *  The service's best guess as to the topic of the request image.
+ *  Inferred from similar images on the open web.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel *> *bestGuessLabels;
+
+/**
+ *  Fully matching images from the Internet.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Web pages containing the matching images from the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage *> *pagesWithMatchingImages;
+
+/**
+ *  Partial matching images from the Internet.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/** The visually similar image results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *visuallySimilarImages;
+
+/** Deduced entities from similar images on the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity *> *webEntities;
+
+@end
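+
+// Usage sketch (illustrative only): reading web-detection results from a
+// hypothetical parsed `webDetection` object.
+//
+//   NSLog(@"Best guess: %@", webDetection.bestGuessLabels.firstObject.label);
+//   for (GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity *entity in
+//        webDetection.webEntities) {
+//     NSLog(@"%@ (score %.2f)", entity.descriptionProperty, entity.score.floatValue);
+//   }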
+
+
+/**
+ *  Entity deduced from similar images on the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity : GTLRObject
+
+/**
+ *  Canonical description of the entity, in English.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/** Opaque entity ID. */
+@property(nonatomic, copy, nullable) NSString *entityId;
+
+/**
+ *  Overall relevancy score for the entity.
+ *  Not normalized and not comparable across different image queries.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Metadata for online images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage : GTLRObject
+
+/**
+ *  (Deprecated) Overall relevancy score for the image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result image URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  Label to provide extra metadata for the web detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel : GTLRObject
+
+/** Label for extra metadata. */
+@property(nonatomic, copy, nullable) NSString *label;
+
+/**
+ *  The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
+ *  For more information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Metadata for web pages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage : GTLRObject
+
+/**
+ *  Fully matching images on the page.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Title for the web page; may contain HTML markup. */
+@property(nonatomic, copy, nullable) NSString *pageTitle;
+
+/**
+ *  Partial matching images on the page.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/**
+ *  (Deprecated) Overall relevancy score for the web page.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result web page URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  A word representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p1beta1Word : GTLRObject
+
+/**
+ *  The bounding box for the word.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the word. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the word. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
+
+/**
+ *  List of symbols in the word.
+ *  The order of the symbols follows the natural reading order.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Symbol *> *symbols;
+
+@end
+
+
+/**
+ *  Response to a single file annotation request. A file may contain one or more
+ *  images, which individually have their own responses.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1AnnotateFileResponse : GTLRObject
+
+/** Information about the file for which this response is generated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1InputConfig *inputConfig;
+
+/** Individual responses to images found within the file. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse *> *responses;
+
+@end
+
+
+/**
+ *  Response to an image annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse : GTLRObject
+
+/**
+ *  If present, contextual information is needed to understand where this image
+ *  comes from.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext *context;
+
+/** If present, crop hints have completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation *cropHintsAnnotation;
+
+/**
+ *  If set, represents the error message for the operation.
+ *  Note that filled-in image annotations are guaranteed to be
+ *  correct, even when `error` is set.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Status *error;
+
+/** If present, face detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation *> *faceAnnotations;
+
+/**
+ *  If present, text (OCR) detection or document (OCR) text detection has
+ *  completed successfully.
+ *  This annotation provides the structural hierarchy for the OCR detected
+ *  text.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation *fullTextAnnotation;
+
+/** If present, image properties were extracted successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties *imagePropertiesAnnotation;
+
+/** If present, label detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *labelAnnotations;
+
+/** If present, landmark detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *landmarkAnnotations;
+
+/**
+ *  If present, localized object detection has completed successfully.
+ *  This will be sorted descending by confidence score.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
+
+/** If present, logo detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *logoAnnotations;
+
+/** If present, product search has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults *productSearchResults;
+
+/** If present, safe-search annotation has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation *safeSearchAnnotation;
+
+/** If present, text (OCR) detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *textAnnotations;
+
+/** If present, web detection has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1WebDetection *webDetection;
+
+@end
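+
+// Usage sketch (illustrative only; assumes GTLRVision_Status exposes a
+// `message` string, per google.rpc.Status): checking for a per-image error
+// before reading label annotations (`response` is hypothetical).
+//
+//   if (response.error != nil) {
+//     NSLog(@"Annotation failed: %@", response.error.message);
+//   } else {
+//     for (GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *label in
+//          response.labelAnnotations) {
+//       NSLog(@"%@ (score %.2f)", label.descriptionProperty, label.score.floatValue);
+//     }
+//   }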
+
+
+/**
+ *  The response for a single offline file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse : GTLRObject
+
+/** The output location and metadata from AsyncAnnotateFileRequest. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig *outputConfig;
+
+@end
+
+
+/**
+ *  Response to an async batch file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1AsyncBatchAnnotateFilesResponse : GTLRObject
+
+/**
+ *  The list of file annotation responses, one for each request in
+ *  AsyncBatchAnnotateFilesRequest.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse *> *responses;
+
+@end
+
+
+/**
+ *  Logical element on the page.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Block : GTLRObject
+
+/**
+ *  Detected block type (text, image, etc.) for this block.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Barcode
+ *        Barcode block. (Value: "BARCODE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Picture
+ *        Image block. (Value: "PICTURE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Ruler
+ *        Horizontal/vertical line box. (Value: "RULER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Table Table
+ *        block. (Value: "TABLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Text Regular
+ *        text block. (Value: "TEXT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Unknown
+ *        Unknown block type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *blockType;
+
+/**
+ *  The bounding box for the block.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results on the block. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** List of paragraphs in this block (if this block is of type text). */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Paragraph *> *paragraphs;
+
+/** Additional information detected for the block. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
+
+@end
+
+
+/**
+ *  A bounding polygon for the detected image annotation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly : GTLRObject
+
+/** The bounding polygon normalized vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex *> *normalizedVertices;
+
+/** The bounding polygon vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Vertex *> *vertices;
+
+@end
+
+
+/**
+ *  Color information consists of RGB channels, score, and the fraction of
+ *  the image that the color occupies in the image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo : GTLRObject
+
+/** RGB components of the color. */
+@property(nonatomic, strong, nullable) GTLRVision_Color *color;
+
+/**
+ *  The fraction of pixels the color occupies in the image.
+ *  Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pixelFraction;
+
+/**
+ *  Image-specific score for this color. Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Single crop hint that is used to generate a new crop when serving an image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1CropHint : GTLRObject
+
+/**
+ *  The bounding polygon for the crop region. The coordinates of the bounding
+ *  box are in the original image's scale.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Confidence of this being a salient region. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Fraction of importance of this salient region with respect to the original
+ *  image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *importanceFraction;
+
+@end
+
+
+/**
+ *  Set of crop hints that are used to generate new crops when serving images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation : GTLRObject
+
+/** Crop hint results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1CropHint *> *cropHints;
+
+@end
+
+
+/**
+ *  Set of dominant colors and their corresponding scores.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation : GTLRObject
+
+/** RGB color values with their score and pixel fraction. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo *> *colors;
+
+@end
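+
+// Usage sketch (illustrative only; `dominantColors` is a hypothetical parsed
+// annotation, and GTLRVision_Color is assumed to expose `red`, `green`, and
+// `blue` as NSNumber fractions in [0, 1], per google.type.Color):
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo *top =
+//       dominantColors.colors.firstObject;
+//   NSLog(@"Dominant color (%.2f, %.2f, %.2f), pixel fraction %.2f",
+//         top.color.red.floatValue, top.color.green.floatValue,
+//         top.color.blue.floatValue, top.pixelFraction.floatValue);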
+
+
+/**
+ *  Set of detected entity features.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation : GTLRObject
+
+/**
+ *  Image region to which this entity belongs. Not produced
+ *  for `LABEL_DETECTION` features.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
+
+/**
+ *  **Deprecated. Use `score` instead.**
+ *  The accuracy of the entity detection in an image.
+ *  For example, for an image in which the "Eiffel Tower" entity is detected,
+ *  this field represents the confidence that there is a tower in the query
+ *  image. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Entity textual description, expressed in its `locale` language.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The language code for the locale in which the entity textual
+ *  `description` is expressed.
+ */
+@property(nonatomic, copy, nullable) NSString *locale;
+
+/**
+ *  The location information for the detected entity. Multiple
+ *  `LocationInfo` elements can be present because one location may
+ *  indicate the location of the scene in the image, and another location
+ *  may indicate the location of the place where the image was taken.
+ *  Location information is usually present for landmarks.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo *> *locations;
+
+/**
+ *  Opaque entity ID. Some IDs may be available in
+ *  [Google Knowledge Graph Search
+ *  API](https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/**
+ *  Some entities may have optional user-supplied `Property` (name/value)
+ *  fields, such as a score or string that qualifies the entity.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Property *> *properties;
+
+/**
+ *  Overall score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/**
+ *  The relevancy of the ICA (Image Content Annotation) label to the
+ *  image. For example, the relevancy of "tower" is likely higher to an image
+ *  containing the detected "Eiffel Tower" than to an image containing a
+ *  detected distant towering building, even though the confidence that
+ *  there is a tower in each image may be the same. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *topicality;
+
+@end
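+
+// Editor's note: a hedged reading sketch, not generated code. Given
+// `entity`, an EntityAnnotation from a response (an assumed variable), a
+// caller can prefer `score` and fall back to the deprecated `confidence`
+// only for older payloads, per the field docs above:
+//
+//   NSNumber *strength = entity.score ?: entity.confidence;
+//   NSLog(@"%@ (score %.2f, topicality %.2f)", entity.descriptionProperty,
+//         strength.floatValue, entity.topicality.floatValue);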
+
+
+/**
+ *  A face annotation object contains the results of face detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation : GTLRObject
+
+/**
+ *  Anger likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *angerLikelihood;
+
+/**
+ *  Blurred likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *blurredLikelihood;
+
+/**
+ *  The bounding polygon around the face. The coordinates of the bounding box
+ *  are in the original image's scale.
+ *  The bounding box is computed to "frame" the face in accordance with human
+ *  expectations. It is based on the landmarker results.
+ *  Note that one or more x and/or y coordinates may not be generated in the
+ *  `BoundingPoly` (the polygon will be unbounded) if only a partial face
+ *  appears in the image to be annotated.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Detection confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
+
+/**
+ *  The `fd_bounding_poly` bounding polygon is tighter than the
+ *  `boundingPoly`, and encloses only the skin part of the face. Typically, it
+ *  is used to eliminate the face from any image analysis that detects the
+ *  "amount of skin" visible in an image. It is not based on the
+ *  landmarker results, only on the initial face detection, hence
+ *  the <code>fd</code> (face detection) prefix.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *fdBoundingPoly;
+
+/**
+ *  Headwear likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *headwearLikelihood;
+
+/**
+ *  Joy likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *joyLikelihood;
+
+/**
+ *  Face landmarking confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
+
+/** Detected face landmarks. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark *> *landmarks;
+
+/**
+ *  Yaw angle, which indicates the leftward/rightward angle that the face is
+ *  pointing relative to the vertical plane perpendicular to the image. Range
+ *  [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *panAngle;
+
+/**
+ *  Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
+ *  of the face relative to the image vertical about the axis perpendicular to
+ *  the face. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *rollAngle;
+
+/**
+ *  Sorrow likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
+
+/**
+ *  Surprise likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
+
+/**
+ *  Pitch angle, which indicates the upwards/downwards angle that the face is
+ *  pointing relative to the image's horizontal plane. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *tiltAngle;
+
+/**
+ *  Under-exposed likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
+
+@end
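+
+// Editor's note: an illustrative sketch, not generated code. Given `face`,
+// a FaceAnnotation from a response (an assumed variable), the pose angles
+// are NSNumbers in degrees (range [-180, 180]) and the likelihoods are the
+// string constants documented above, so they compare by string equality:
+//
+//   BOOL joyful = [face.joyLikelihood isEqualToString:
+//       kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely];
+//   NSLog(@"pan %.1f roll %.1f tilt %.1f joyful=%d",
+//         face.panAngle.floatValue, face.rollAngle.floatValue,
+//         face.tiltAngle.floatValue, joyful);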
+
+
+/**
+ *  A face-specific landmark (for example, a face feature).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark : GTLRObject
+
+/** Face landmark position. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1Position *position;
+
+/**
+ *  Face landmark type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinGnathion
+ *        Chin gnathion. (Value: "CHIN_GNATHION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinLeftGonion
+ *        Chin left gonion. (Value: "CHIN_LEFT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinRightGonion
+ *        Chin right gonion. (Value: "CHIN_RIGHT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ForeheadGlabella
+ *        Forehead glabella. (Value: "FOREHEAD_GLABELLA")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEarTragion
+ *        Left ear tragion. (Value: "LEFT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEye
+ *        Left eye. (Value: "LEFT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
+ *        Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
+ *        Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
+ *        Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyePupil
+ *        Left eye pupil. (Value: "LEFT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
+ *        Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
+ *        Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
+ *        Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
+ *        Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LowerLip
+ *        Lower lip. (Value: "LOWER_LIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
+ *        Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthCenter
+ *        Mouth center. (Value: "MOUTH_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthLeft
+ *        Mouth left. (Value: "MOUTH_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthRight
+ *        Mouth right. (Value: "MOUTH_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomCenter
+ *        Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomLeft
+ *        Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomRight
+ *        Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseTip
+ *        Nose tip. (Value: "NOSE_TIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEarTragion
+ *        Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEye
+ *        Right eye. (Value: "RIGHT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
+ *        Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
+ *        Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
+ *        Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyePupil
+ *        Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
+ *        Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
+ *        Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
+ *        Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
+ *        Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UnknownLandmark
+ *        Unknown face landmark detected. Should not be filled. (Value:
+ *        "UNKNOWN_LANDMARK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UpperLip
+ *        Upper lip. (Value: "UPPER_LIP")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  The Google Cloud Storage location where the output will be written to.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination : GTLRObject
+
+/**
+ *  Google Cloud Storage URI where the results will be stored. Results will
+ *  be in JSON format and preceded by its corresponding input URI. This field
+ *  can either represent a single file, or a prefix for multiple outputs.
+ *  Prefixes must end in a `/`.
+ *  Examples:
+ *  * File: gs://bucket-name/filename.json
+ *  * Prefix: gs://bucket-name/prefix/here/
+ *  * File: gs://bucket-name/prefix/here
+ *  If there are multiple outputs, each response is still an
+ *  AnnotateFileResponse, each of which contains some subset of the full
+ *  list of AnnotateImageResponse.
+ *  Multiple outputs can happen if, for example, the output JSON is too large
+ *  and overflows into multiple sharded files.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
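+
+// Editor's note: a minimal configuration sketch; the bucket name is a
+// placeholder and +object is the GTLRObject convenience constructor. Per
+// the `uri` docs above, a trailing `/` makes the URI a prefix for sharded
+// output files, while a URI without one names a single output file:
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination *destination =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination object];
+//   destination.uri = @"gs://bucket-name/prefix/here/";  // prefix form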
+
+
+/**
+ *  The Google Cloud Storage location where the input will be read from.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1GcsSource : GTLRObject
+
+/**
+ *  Google Cloud Storage URI for the input file. This must only be a
+ *  Google Cloud Storage object. Wildcards are not currently supported.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  If an image was produced from a file (e.g. a PDF), this message gives
+ *  information about the source of that image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext : GTLRObject
+
+/**
+ *  If the file was a PDF or TIFF, this field gives the page number within
+ *  the file used to produce the image.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pageNumber;
+
+/** The URI of the file used to produce the image. */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Stores image properties, such as dominant colors.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties : GTLRObject
+
+/** If present, dominant colors completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation *dominantColors;
+
+@end
+
+
+/**
+ *  The desired input location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1InputConfig : GTLRObject
+
+/** The Google Cloud Storage location to read the input from. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1GcsSource *gcsSource;
+
+/**
+ *  The type of the file. Currently only "application/pdf" and "image/tiff"
+ *  are supported. Wildcards are not supported.
+ */
+@property(nonatomic, copy, nullable) NSString *mimeType;
+
+@end
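+
+// Editor's note: a minimal configuration sketch; the object name is a
+// placeholder and +object is the GTLRObject convenience constructor. Only
+// "application/pdf" and "image/tiff" are documented as supported MIME
+// types, and the gcsSource URI must name a single object (no wildcards):
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1GcsSource *source =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1GcsSource object];
+//   source.uri = @"gs://bucket-name/document.pdf";
+//   GTLRVision_GoogleCloudVisionV1p2beta1InputConfig *input =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1InputConfig object];
+//   input.gcsSource = source;
+//   input.mimeType = @"application/pdf";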
+
+
+/**
+ *  Set of detected objects with bounding boxes.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation : GTLRObject
+
+/** Image region to which this object belongs. This must be populated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+/** Object ID that should align with EntityAnnotation mid. */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/** Object name, expressed in its `language_code` language. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Detected entity location information.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo : GTLRObject
+
+/** lat/long location coordinates. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the normalized vertex coordinates are relative to the original image
+ *  and range from 0 to 1.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
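+
+// Editor's note: an illustrative sketch, not generated code. Given `nv`,
+// a NormalizedVertex (an assumed variable), the coordinates are relative
+// to the original image and range from 0 to 1, so recovering pixel
+// coordinates scales by the image size (here assumed to be known as
+// `imageWidthInPixels`/`imageHeightInPixels`):
+//
+//   float px = nv.x.floatValue * imageWidthInPixels;
+//   float py = nv.y.floatValue * imageHeightInPixels;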
+
+
+/**
+ *  Contains metadata for the BatchAnnotateImages operation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata : GTLRObject
+
+/** The time when the batch request was received. */
+@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
+
+/**
+ *  Current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled
+ *        The batch processing was cancelled. (Value: "CANCELLED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Created
+ *        Request is received. (Value: "CREATED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done
+ *        The batch processing is done. (Value: "DONE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Running
+ *        Request is actively being processed. (Value: "RUNNING")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_StateUnspecified
+ *        Invalid. (Value: "STATE_UNSPECIFIED")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the operation result was last updated. */
+@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
+
+@end
+
+
+/**
+ *  The desired output location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig : GTLRObject
+
+/**
+ *  The max number of response protos to put into each output JSON file on
+ *  Google Cloud Storage.
+ *  The valid range is [1, 100]. If not specified, the default value is 20.
+ *  For example, for one PDF file with 100 pages, 100 response protos will
+ *  be generated. If `batch_size` = 20, then 5 JSON files each
+ *  containing 20 response protos will be written under the prefix
+ *  `gcs_destination`.`uri`.
+ *  Currently, batch_size only applies to GcsDestination, with potential future
+ *  support for other output configurations.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *batchSize;
+
+/** The Google Cloud Storage location to write the output(s) to. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination *gcsDestination;
+
+@end
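+
+// Editor's note: a worked example of the `batchSize` arithmetic documented
+// above, reusing the `destination` sketch from GcsDestination. A 100-page
+// PDF yields 100 response protos; with batchSize = 20 they are written as
+// 100 / 20 = 5 JSON files under the gcsDestination prefix:
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig *output =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig object];
+//   output.batchSize = @20;  // valid range [1, 100]; defaults to 20
+//   output.gcsDestination = destination;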
+
+
+/**
+ *  Detected page from OCR.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Page : GTLRObject
+
+/** List of blocks of text, images, etc. on this page. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Block *> *blocks;
+
+/**
+ *  Confidence of the OCR results on the page. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Page height. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *height;
+
+/** Additional information detected on the page. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
+
+/**
+ *  Page width. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *width;
+
+@end
+
+
+/**
+ *  Structural unit of text representing a number of words in certain order.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Paragraph : GTLRObject
+
+/**
+ *  The bounding box for the paragraph.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the paragraph. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the paragraph. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
+
+/** List of words in this paragraph. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Word *> *words;
+
+@end
+
+
+/**
+ *  A 3D position in the image, used primarily for Face detection landmarks.
+ *  A valid Position must have both x and y coordinates.
+ *  The position coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Position : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+/**
+ *  Z coordinate (or depth).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *z;
+
+@end
+
+
+/**
+ *  A Product contains ReferenceImages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Product : GTLRObject
+
+/**
+ *  User-provided metadata to be stored with this product. Must be at most 4096
+ *  characters long.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The user-provided name for this Product. Must not be empty. Must be at most
+ *  4096 characters long.
+ */
+@property(nonatomic, copy, nullable) NSString *displayName;
+
+/**
+ *  The resource name of the product.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *  This field is ignored when creating a product.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The category for the product identified by the reference image. This should
+ *  be either "homegoods", "apparel", or "toys".
+ *  This field is immutable.
+ */
+@property(nonatomic, copy, nullable) NSString *productCategory;
+
+/**
+ *  Key-value pairs that can be attached to a product. At query time,
+ *  constraints can be specified based on the product_labels.
+ *  Note that integer values can be provided as strings, e.g. "1199". Only
+ *  strings with integer values can match a range-based restriction, which
+ *  will be supported soon.
+ *  Multiple values can be assigned to the same key. One product may have up to
+ *  100 product_labels.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue *> *productLabels;
+
+@end
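+
+// Editor's note: a construction sketch under the documented constraints
+// (displayName non-empty and at most 4096 characters, productCategory one
+// of "homegoods", "apparel", or "toys", at most 100 labels per product);
+// the key/value strings are placeholders and +object is the GTLRObject
+// convenience constructor:
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue *label =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue object];
+//   label.key = @"style";      // at most 128 bytes
+//   label.value = @"vintage";  // at most 128 bytes
+//
+//   GTLRVision_GoogleCloudVisionV1p2beta1Product *product =
+//       [GTLRVision_GoogleCloudVisionV1p2beta1Product object];
+//   product.displayName = @"Denim jacket";
+//   product.productCategory = @"apparel";
+//   product.productLabels = @[ label ];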
+
+
+/**
+ *  A product label represented as a key-value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue : GTLRObject
+
+/**
+ *  The key of the label attached to the product. Cannot be empty and cannot
+ *  exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *key;
+
+/**
+ *  The value of the label attached to the product. Cannot be empty and
+ *  cannot exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  Results for a product search request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults : GTLRObject
+
+/**
+ *  Timestamp of the index which provided these results. Changes made after
+ *  this time are not reflected in the current results.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
+
+/**
+ *  List of results grouped by products detected in the query image. Each entry
+ *  corresponds to one bounding polygon in the query image, and contains the
+ *  matching products specific to that region. There may be duplicate product
+ *  matches in the union of all the per-product results.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about the products similar to a single product in a query
+ *  image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult : GTLRObject
+
+/** The bounding polygon around the product detected in the query image. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about a product.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult : GTLRObject
+
+/**
+ *  The resource name of the image from the product that is the closest match
+ *  to the query.
+ */
+@property(nonatomic, copy, nullable) NSString *image;
+
+/** The Product. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1Product *product;
+
+/**
+ *  A confidence level on the match, ranging from 0 (no confidence) to
+ *  1 (full confidence).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  A `Property` consists of a user-supplied name/value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Property : GTLRObject
+
+/** Name of the property. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Value of numeric properties.
+ *
+ *  Uses NSNumber of unsignedLongLongValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *uint64Value;
+
+/** Value of the property. */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  Set of features pertaining to the image, computed by computer vision
+ *  methods over safe-search verticals (for example, adult, spoof, medical,
+ *  violence).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation : GTLRObject
+
+/**
+ *  Represents the adult content likelihood for the image. Adult content may
+ *  contain elements such as nudity, pornographic images or cartoons, or
+ *  sexual activities.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *adult;
+
+/**
+ *  Likelihood that this is a medical image.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *medical;
+
+/**
+ *  Likelihood that the request image contains racy content. Racy content may
+ *  include (but is not limited to) skimpy or sheer clothing, strategically
+ *  covered nudity, lewd or provocative poses, or close-ups of sensitive
+ *  body areas.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *racy;
+
+/**
+ *  Spoof likelihood. The likelihood that a modification
+ *  was made to the image's canonical version to make it appear
+ *  funny or offensive.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *spoof;
+
+/**
+ *  Likelihood that this image contains violent content.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *violence;
+
+@end
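+
+// Editor's note: an illustrative check, not generated code. Given `safe`,
+// a SafeSearchAnnotation from a response (an assumed variable), each
+// vertical is one of the likelihood string constants documented above; a
+// conservative gate might treat LIKELY and VERY_LIKELY as positive:
+//
+//   BOOL flagged =
+//       [safe.adult isEqualToString:
+//           kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely] ||
+//       [safe.adult isEqualToString:
+//           kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely];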
+
+
+/**
+ *  A single symbol representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Symbol : GTLRObject
+
+/**
+ *  The bounding box for the symbol.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the symbol. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the symbol. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
+
+/** The actual UTF-8 representation of the symbol. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
+
+
+/**
+ *  TextAnnotation contains a structured representation of OCR extracted text.
+ *  The hierarchy of an OCR extracted text structure is like this:
+ *  TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
+ *  Each structural component, starting from Page, may further have its own
+ *  properties. Properties describe detected languages, breaks, etc. Please
+ *  refer to the TextAnnotation.TextProperty message definition below for
+ *  more detail.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation : GTLRObject
+
+/** List of pages detected by OCR. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Page *> *pages;
+
+/** UTF-8 text detected on the pages. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
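+
+// Editor's note: a traversal sketch following the documented hierarchy
+// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol. Given
+// `annotation`, a TextAnnotation (an assumed variable), this rebuilds the
+// raw text from symbols only; detected breaks are ignored for brevity:
+//
+//   NSMutableString *rebuilt = [NSMutableString string];
+//   for (GTLRVision_GoogleCloudVisionV1p2beta1Page *page in annotation.pages) {
+//     for (GTLRVision_GoogleCloudVisionV1p2beta1Block *block in page.blocks) {
+//       for (GTLRVision_GoogleCloudVisionV1p2beta1Paragraph *para in block.paragraphs) {
+//         for (GTLRVision_GoogleCloudVisionV1p2beta1Word *word in para.words) {
+//           for (GTLRVision_GoogleCloudVisionV1p2beta1Symbol *symbol in word.symbols) {
+//             [rebuilt appendString:(symbol.text ?: @"")];
+//           }
+//         }
+//       }
+//     }
+//   }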
+
+
+/**
+ *  Detected start or end of a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak : GTLRObject
+
+/**
+ *  True if the break precedes the element.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *isPrefix;
+
+/**
+ *  Detected break type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace
+ *        Line-wrapping break. (Value: "EOL_SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen
+ *        End-line hyphen that is not present in text; does not co-occur with
+ *        `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak
+ *        Line break that ends a paragraph. (Value: "LINE_BREAK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space
+ *        Regular space. (Value: "SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_SureSpace
+ *        Sure space (very wide). (Value: "SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Unknown
+ *        Unknown break label type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  Detected language for a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage : GTLRObject
+
+/**
+ *  Confidence of detected language. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Additional information detected on the structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty : GTLRObject
+
+/** Detected start or end of a text segment. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak *detectedBreak;
+
+/** A list of detected languages together with confidence. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the vertex coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Vertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
+
+
+/**
+ *  Relevant information for the image from the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetection : GTLRObject
+
+/**
+ *  The service's best guess as to the topic of the request image.
+ *  Inferred from similar images on the open web.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel *> *bestGuessLabels;
+
+/**
+ *  Fully matching images from the Internet.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Web pages containing the matching images from the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage *> *pagesWithMatchingImages;
+
+/**
+ *  Partial matching images from the Internet.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/** The visually similar image results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *visuallySimilarImages;
+
+/** Deduced entities from similar images on the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity *> *webEntities;
+
+@end
+
+
+/**
+ *  Entity deduced from similar images on the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity : GTLRObject
+
+/**
+ *  Canonical description of the entity, in English.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/** Opaque entity ID. */
+@property(nonatomic, copy, nullable) NSString *entityId;
+
+/**
+ *  Overall relevancy score for the entity.
+ *  Not normalized and not comparable across different image queries.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Metadata for online images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage : GTLRObject
+
+/**
+ *  (Deprecated) Overall relevancy score for the image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result image URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  Label to provide extra metadata for the web detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel : GTLRObject
+
+/** Label for extra metadata. */
+@property(nonatomic, copy, nullable) NSString *label;
+
+/**
+ *  The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
+ *  For more information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Metadata for web pages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage : GTLRObject
+
+/**
+ *  Fully matching images on the page.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Title for the web page; may contain HTML markup. */
+@property(nonatomic, copy, nullable) NSString *pageTitle;
+
+/**
+ *  Partial matching images on the page.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/**
+ *  (Deprecated) Overall relevancy score for the web page.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result web page URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  A word representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p2beta1Word : GTLRObject
+
+/**
+ *  The bounding box for the word.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the word. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the word. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
+
+/**
+ *  List of symbols in the word.
+ *  The order of the symbols follows the natural reading order.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Symbol *> *symbols;
+
+@end
+
+
+/**
+ *  Response to a single file annotation request. A file may contain one or more
+ *  images, which individually have their own responses.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1AnnotateFileResponse : GTLRObject
+
+/** Information about the file for which this response is generated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1InputConfig *inputConfig;
+
+/** Individual responses to images found within the file. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse *> *responses;
+
+@end
+
+
+/**
+ *  Response to an image annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse : GTLRObject
+
+/**
+ *  If present, contextual information needed to understand where this image
+ *  comes from.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext *context;
+
+/** If present, crop hints have completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation *cropHintsAnnotation;
+
+/**
+ *  If set, represents the error message for the operation.
+ *  Note that filled-in image annotations are guaranteed to be
+ *  correct, even when `error` is set.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Status *error;
+
+/** If present, face detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation *> *faceAnnotations;
+
+/**
+ *  If present, text (OCR) detection or document (OCR) text detection has
+ *  completed successfully.
+ *  This annotation provides the structural hierarchy for the OCR detected
+ *  text.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation *fullTextAnnotation;
+
+/** If present, image properties were extracted successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties *imagePropertiesAnnotation;
+
+/** If present, label detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *labelAnnotations;
+
+/** If present, landmark detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *landmarkAnnotations;
+
+/**
+ *  If present, localized object detection has completed successfully.
+ *  This will be sorted descending by confidence score.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
+
+/** If present, logo detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *logoAnnotations;
+
+/** If present, product search has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults *productSearchResults;
+
+/** If present, safe-search annotation has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation *safeSearchAnnotation;
+
+/** If present, text (OCR) detection has completed successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *textAnnotations;
+
+/** If present, web detection has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1WebDetection *webDetection;
+
+@end
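+
+// Editor's note: an illustrative sketch, not generated code. Given
+// `response`, an AnnotateImageResponse (an assumed variable), annotations
+// that are filled in remain valid even when `error` is set, so a caller
+// can log the failure and still consume whatever completed:
+//
+//   if (response.error) {
+//     NSLog(@"partial failure: %@", response.error);
+//   }
+//   for (GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *label in
+//        response.labelAnnotations) {
+//     NSLog(@"label: %@", label.descriptionProperty);
+//   }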
+
+
+/**
+ *  The response for a single offline file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse : GTLRObject
+
+/** The output location and metadata from AsyncAnnotateFileRequest. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig *outputConfig;
+
+@end
+
+
+/**
+ *  Response to an async batch file annotation request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1AsyncBatchAnnotateFilesResponse : GTLRObject
+
+/**
+ *  The list of file annotation responses, one for each request in
+ *  AsyncBatchAnnotateFilesRequest.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse *> *responses;
+
+@end
+
+
+/**
+ *  Metadata for the batch operations such as the current state.
+ *  This is included in the `metadata` field of the `Operation` returned by the
+ *  `GetOperation` call of the `google::longrunning::Operations` service.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata : GTLRObject
+
+/**
+ *  The time when the batch request is finished and
+ *  google.longrunning.Operation.done is set to true.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *endTime;
+
+/**
+ *  The current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled
+ *        The request is done after the longrunning.Operations.CancelOperation
+ *        has been called by the user. Any records that were processed before
+ *        the cancel command are output as specified in the request. (Value:
+ *        "CANCELLED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed
+ *        The request is done and no item has been successfully processed.
+ *        (Value: "FAILED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Processing
+ *        Request is actively being processed. (Value: "PROCESSING")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_StateUnspecified
+ *        Invalid. (Value: "STATE_UNSPECIFIED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful
+ *        The request is done and at least one item has been successfully
+ *        processed. (Value: "SUCCESSFUL")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the batch request was submitted to the server. */
+@property(nonatomic, strong, nullable) GTLRDateTime *submitTime;
+
+@end
+
+
+/**
+ *  Logical element on the page.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Block : GTLRObject
+
+/**
+ *  Detected block type (text, image, etc.) for this block.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Barcode
+ *        Barcode block. (Value: "BARCODE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Picture
+ *        Image block. (Value: "PICTURE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Ruler
+ *        Horizontal/vertical line box. (Value: "RULER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Table Table
+ *        block. (Value: "TABLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text Regular
+ *        text block. (Value: "TEXT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Unknown
+ *        Unknown block type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *blockType;
+
+/**
+ *  The bounding box for the block.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results on the block. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** List of paragraphs in this block (if this block is of type text). */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Paragraph *> *paragraphs;
+
+/** Additional information detected for the block. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
+
+@end
+
+
+/**
+ *  A bounding polygon for the detected image annotation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly : GTLRObject
+
+/** The bounding polygon normalized vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex *> *normalizedVertices;
+
+/** The bounding polygon vertices. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Vertex *> *vertices;
+
+@end
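+
+// Editor's note: a minimal usage sketch, not part of the generated header and
+// kept behind `#if 0` so the header still compiles. It assumes CoreGraphics
+// is available and shows one way to reduce a polygon's pixel-scale vertices
+// to a CGRect; the function name is hypothetical.
+#if 0
+static CGRect ExampleRectFromBoundingPoly(
+    GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *poly) {
+  CGFloat minX = CGFLOAT_MAX, minY = CGFLOAT_MAX;
+  CGFloat maxX = -CGFLOAT_MAX, maxY = -CGFLOAT_MAX;
+  for (GTLRVision_GoogleCloudVisionV1p3beta1Vertex *vertex in poly.vertices) {
+    // `vertices` are integer coordinates in the original image's scale.
+    minX = MIN(minX, vertex.x.floatValue);
+    minY = MIN(minY, vertex.y.floatValue);
+    maxX = MAX(maxX, vertex.x.floatValue);
+    maxY = MAX(maxY, vertex.y.floatValue);
+  }
+  return CGRectMake(minX, minY, maxX - minX, maxY - minY);
+}
+#endif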
+
+
+/**
+ *  Color information consists of RGB channels, score, and the fraction of
+ *  the image that the color occupies in the image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo : GTLRObject
+
+/** RGB components of the color. */
+@property(nonatomic, strong, nullable) GTLRVision_Color *color;
+
+/**
+ *  The fraction of pixels the color occupies in the image.
+ *  Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pixelFraction;
+
+/**
+ *  Image-specific score for this color. Value in range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Single crop hint that is used to generate a new crop when serving an image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1CropHint : GTLRObject
+
+/**
+ *  The bounding polygon for the crop region. The coordinates of the bounding
+ *  box are in the original image's scale.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Confidence of this being a salient region. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Fraction of importance of this salient region with respect to the original
+ *  image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *importanceFraction;
+
+@end
+
+
+/**
+ *  Set of crop hints that are used to generate new crops when serving images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation : GTLRObject
+
+/** Crop hint results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1CropHint *> *cropHints;
+
+@end
+
+
+/**
+ *  Set of dominant colors and their corresponding scores.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation : GTLRObject
+
+/** RGB color values with their score and pixel fraction. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *> *colors;
+
+@end
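+
+// Editor's note: a minimal sketch, not part of the generated header and kept
+// behind `#if 0` so the header still compiles. It picks the dominant color
+// with the highest score; the function name is hypothetical.
+#if 0
+static GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *ExampleBestColor(
+    GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation *annotation) {
+  GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *best = nil;
+  for (GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *info in annotation.colors) {
+    if (best == nil || info.score.floatValue > best.score.floatValue) {
+      best = info;  // `best.color` holds RGB; `pixelFraction` is its coverage.
+    }
+  }
+  return best;
+}
+#endif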
+
+
+/**
+ *  Set of detected entity features.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation : GTLRObject
+
+/**
+ *  Image region to which this entity belongs. Not produced
+ *  for `LABEL_DETECTION` features.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
+
+/**
+ *  **Deprecated. Use `score` instead.**
+ *  The accuracy of the entity detection in an image.
+ *  For example, for an image in which the "Eiffel Tower" entity is detected,
+ *  this field represents the confidence that there is a tower in the query
+ *  image. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Entity textual description, expressed in its `locale` language.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The language code for the locale in which the entity textual
+ *  `description` is expressed.
+ */
+@property(nonatomic, copy, nullable) NSString *locale;
+
+/**
+ *  The location information for the detected entity. Multiple
+ *  `LocationInfo` elements can be present because one location may
+ *  indicate the location of the scene in the image, and another location
+ *  may indicate the location of the place where the image was taken.
+ *  Location information is usually present for landmarks.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo *> *locations;
+
+/**
+ *  Opaque entity ID. Some IDs may be available in
+ *  [Google Knowledge Graph Search
+ *  API](https://developers.google.com/knowledge-graph/).
+ */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/**
+ *  Some entities may have optional user-supplied `Property` (name/value)
+ *  fields, such as a score or string that qualifies the entity.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Property *> *properties;
+
+/**
+ *  Overall score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/**
+ *  The relevancy of the ICA (Image Content Annotation) label to the
+ *  image. For example, the relevancy of "tower" is likely higher to an image
+ *  containing the detected "Eiffel Tower" than to an image containing a
+ *  detected distant towering building, even though the confidence that
+ *  there is a tower in each image may be the same. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *topicality;
+
+@end
+
+
+/**
+ *  A face annotation object contains the results of face detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation : GTLRObject
+
+/**
+ *  Anger likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *angerLikelihood;
+
+/**
+ *  Blurred likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *blurredLikelihood;
+
+/**
+ *  The bounding polygon around the face. The coordinates of the bounding box
+ *  are in the original image's scale.
+ *  The bounding box is computed to "frame" the face in accordance with human
+ *  expectations. It is based on the landmarker results.
+ *  Note that one or more x and/or y coordinates may not be generated in the
+ *  `BoundingPoly` (the polygon will be unbounded) if only a partial face
+ *  appears in the image to be annotated.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
+
+/**
+ *  Detection confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
+
+/**
+ *  The `fd_bounding_poly` bounding polygon is tighter than the
+ *  `boundingPoly`, and encloses only the skin part of the face. Typically, it
+ *  is used to eliminate the face from any image analysis that detects the
+ *  "amount of skin" visible in an image. It is not based on the
+ *  landmarker results, only on the initial face detection, hence
+ *  the <code>fd</code> (face detection) prefix.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *fdBoundingPoly;
+
+/**
+ *  Headwear likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *headwearLikelihood;
+
+/**
+ *  Joy likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *joyLikelihood;
+
+/**
+ *  Face landmarking confidence. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
+
+/** Detected face landmarks. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark *> *landmarks;
+
+/**
+ *  Yaw angle, which indicates the leftward/rightward angle that the face is
+ *  pointing relative to the vertical plane perpendicular to the image. Range
+ *  [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *panAngle;
+
+/**
+ *  Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
+ *  of the face relative to the image vertical about the axis perpendicular to
+ *  the face. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *rollAngle;
+
+/**
+ *  Sorrow likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
+
+/**
+ *  Surprise likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
+
+/**
+ *  Pitch angle, which indicates the upwards/downwards angle that the face is
+ *  pointing relative to the image's horizontal plane. Range [-180,180].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *tiltAngle;
+
+/**
+ *  Under-exposed likelihood.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
+
+@end
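+
+// Editor's note: a minimal sketch, not part of the generated header and kept
+// behind `#if 0` so the header still compiles. The likelihood properties are
+// NSString-typed enums, so they are compared against the generated string
+// constants; the function name is hypothetical.
+#if 0
+static BOOL ExampleIsVeryLikelyJoyful(
+    GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation *face) {
+  return [face.joyLikelihood isEqualToString:
+      kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely];
+}
+#endif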
+
+
+/**
+ *  A face-specific landmark (for example, a face feature).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark : GTLRObject
+
+/** Face landmark position. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1Position *position;
+
+/**
+ *  Face landmark type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinGnathion
+ *        Chin gnathion. (Value: "CHIN_GNATHION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinLeftGonion
+ *        Chin left gonion. (Value: "CHIN_LEFT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinRightGonion
+ *        Chin right gonion. (Value: "CHIN_RIGHT_GONION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ForeheadGlabella
+ *        Forehead glabella. (Value: "FOREHEAD_GLABELLA")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEarTragion
+ *        Left ear tragion. (Value: "LEFT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEye
+ *        Left eye. (Value: "LEFT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
+ *        Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
+ *        Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
+ *        Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyePupil
+ *        Left eye pupil. (Value: "LEFT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
+ *        Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
+ *        Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
+ *        Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
+ *        Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LowerLip
+ *        Lower lip. (Value: "LOWER_LIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
+ *        Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthCenter
+ *        Mouth center. (Value: "MOUTH_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthLeft
+ *        Mouth left. (Value: "MOUTH_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthRight
+ *        Mouth right. (Value: "MOUTH_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomCenter
+ *        Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomLeft
+ *        Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomRight
+ *        Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseTip
+ *        Nose tip. (Value: "NOSE_TIP")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEarTragion
+ *        Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEye
+ *        Right eye. (Value: "RIGHT_EYE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
+ *        Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
+ *        Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
+ *        Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyePupil
+ *        Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
+ *        Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
+ *        Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
+ *        Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
+ *        Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UnknownLandmark
+ *        Unknown face landmark detected. Should not be filled. (Value:
+ *        "UNKNOWN_LANDMARK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UpperLip
+ *        Upper lip. (Value: "UPPER_LIP")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  The Google Cloud Storage location where the output will be written to.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination : GTLRObject
+
+/**
+ *  Google Cloud Storage URI where the results will be stored. Results will
+ *  be in JSON format and preceded by their corresponding input URIs. This
+ *  field can represent either a single file or a prefix for multiple outputs.
+ *  Prefixes must end in a `/`.
+ *  Examples:
+ *  * File: gs://bucket-name/filename.json
+ *  * Prefix: gs://bucket-name/prefix/here/
+ *  * File: gs://bucket-name/prefix/here
+ *  If multiple outputs are written, each response is still an
+ *  AnnotateFileResponse, each containing some subset of the full list of
+ *  AnnotateImageResponses.
+ *  Multiple outputs can happen if, for example, the output JSON is too large
+ *  and overflows into multiple sharded files.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  The Google Cloud Storage location where the input will be read from.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1GcsSource : GTLRObject
+
+/**
+ *  Google Cloud Storage URI for the input file. This must only be a
+ *  Google Cloud Storage object. Wildcards are not currently supported.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  If an image was produced from a file (e.g. a PDF), this message gives
+ *  information about the source of that image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext : GTLRObject
+
+/**
+ *  If the file was a PDF or TIFF, this field gives the page number within
+ *  the file used to produce the image.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pageNumber;
+
+/** The URI of the file used to produce the image. */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Stores image properties, such as dominant colors.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties : GTLRObject
+
+/** If present, dominant color detection has completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation *dominantColors;
+
+@end
+
+
+/**
+ *  Response message for the `ImportProductSets` method.
+ *  This message is returned by the
+ *  google.longrunning.Operations.GetOperation method in the returned
+ *  google.longrunning.Operation.response field.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ImportProductSetsResponse : GTLRObject
+
+/** The list of reference_images that were imported successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage *> *referenceImages;
+
+/**
+ *  The rpc status for each ImportProductSet request, including both successes
+ *  and errors.
+ *  The number of statuses here matches the number of lines in the CSV file,
+ *  and statuses[i] stores the success or failure status of processing the
+ *  i-th line of the CSV, starting from line 0.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Status *> *statuses;
+
+@end
+
+
+/**
+ *  The desired input location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1InputConfig : GTLRObject
+
+/** The Google Cloud Storage location to read the input from. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1GcsSource *gcsSource;
+
+/**
+ *  The type of the file. Currently only "application/pdf" and "image/tiff"
+ *  are supported. Wildcards are not supported.
+ */
+@property(nonatomic, copy, nullable) NSString *mimeType;
+
+@end
+
+
+/**
+ *  Set of detected objects with bounding boxes.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation : GTLRObject
+
+/** Image region to which this object belongs. This must be populated. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+/** Object ID that should align with EntityAnnotation mid. */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/** Object name, expressed in its `language_code` language. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Detected entity location information.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo : GTLRObject
+
+/** Lat/long location coordinates. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the normalized vertex coordinates are relative to the original image
+ *  and range from 0 to 1.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
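+
+// Editor's note: a minimal sketch, not part of the generated header and kept
+// behind `#if 0` so the header still compiles. Normalized coordinates are in
+// [0, 1], so mapping back to pixels just scales by the original image size;
+// the function name and parameters are hypothetical.
+#if 0
+static CGPoint ExamplePixelPointFromNormalizedVertex(
+    GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex *vertex,
+    CGFloat imageWidth, CGFloat imageHeight) {
+  return CGPointMake(vertex.x.floatValue * imageWidth,
+                     vertex.y.floatValue * imageHeight);
+}
+#endif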
+
+
+/**
+ *  Contains metadata for the BatchAnnotateImages operation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata : GTLRObject
+
+/** The time when the batch request was received. */
+@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
+
+/**
+ *  Current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Cancelled
+ *        The batch processing was cancelled. (Value: "CANCELLED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Created
+ *        Request is received. (Value: "CREATED")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Done
+ *        The batch processing is done. (Value: "DONE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Running
+ *        Request is actively being processed. (Value: "RUNNING")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_StateUnspecified
+ *        Invalid. (Value: "STATE_UNSPECIFIED")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the operation result was last updated. */
+@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
+
+@end
+
+
+/**
+ *  The desired output location and metadata.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig : GTLRObject
+
+/**
+ *  The max number of response protos to put into each output JSON file on
+ *  Google Cloud Storage.
+ *  The valid range is [1, 100]. If not specified, the default value is 20.
+ *  For example, for one PDF file with 100 pages, 100 response protos will
+ *  be generated. If `batch_size` = 20, then 5 JSON files, each containing
+ *  20 response protos, will be written under the prefix
+ *  `gcs_destination`.`uri`.
+ *  Currently, batch_size only applies to GcsDestination, with potential
+ *  future support for other output configurations.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *batchSize;
+
+/** The Google Cloud Storage location to write the output(s) to. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination *gcsDestination;
+
+@end
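+
+// Editor's note: a minimal configuration sketch, not part of the generated
+// header and kept behind `#if 0` so the header still compiles. With a
+// 100-page PDF and batchSize = 20, ceil(100 / 20) = 5 JSON shards would be
+// written under the prefix; the bucket name and function name are
+// hypothetical.
+#if 0
+static GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig *ExampleOutputConfig(void) {
+  GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination *destination =
+      [[GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination alloc] init];
+  destination.uri = @"gs://my-bucket/ocr-output/";  // A prefix must end in `/`.
+
+  GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig *outputConfig =
+      [[GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig alloc] init];
+  outputConfig.gcsDestination = destination;
+  outputConfig.batchSize = @20;  // 100 response protos -> 5 files of 20 each.
+  return outputConfig;
+}
+#endif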
+
+
+/**
+ *  Detected page from OCR.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Page : GTLRObject
+
+/** List of blocks of text, images etc on this page. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Block *> *blocks;
+
+/**
+ *  Confidence of the OCR results on the page. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Page height. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *height;
+
+/** Additional information detected on the page. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
+
+/**
+ *  Page width. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *width;
+
+@end
+
+
+/**
+ *  Structural unit of text representing a number of words in a certain order.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Paragraph : GTLRObject
+
+/**
+ *  The bounding box for the paragraph.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the paragraph. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the paragraph. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
+
+/** List of words in this paragraph. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Word *> *words;
+
+@end
+
+
+/**
+ *  A 3D position in the image, used primarily for Face detection landmarks.
+ *  A valid Position must have both x and y coordinates.
+ *  The position coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Position : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+/**
+ *  Z coordinate (or depth).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *z;
+
+@end
+
+
+/**
+ *  A Product contains ReferenceImages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Product : GTLRObject
+
+/**
+ *  User-provided metadata to be stored with this product. Must be at most 4096
+ *  characters long.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The user-provided name for this Product. Must not be empty. Must be at most
+ *  4096 characters long.
+ */
+@property(nonatomic, copy, nullable) NSString *displayName;
+
+/**
+ *  The resource name of the product.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *  This field is ignored when creating a product.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The category for the product identified by the reference image. This
+ *  should be one of "homegoods", "apparel", or "toys".
+ *  This field is immutable.
+ */
+@property(nonatomic, copy, nullable) NSString *productCategory;
+
+/**
+ *  Key-value pairs that can be attached to a product. At query time,
+ *  constraints can be specified based on the product_labels.
+ *  Note that integer values can be provided as strings, e.g. "1199". Only
+ *  strings with integer values can match a range-based restriction, which
+ *  will be supported soon.
+ *  Multiple values can be assigned to the same key. One product may have up to
+ *  100 product_labels.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue *> *productLabels;
+
+@end
+
+
+/**
+ *  A product label represented as a key-value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue : GTLRObject
+
+/**
+ *  The key of the label attached to the product. Cannot be empty and cannot
+ *  exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *key;
+
+/**
+ *  The value of the label attached to the product. Cannot be empty and
+ *  cannot exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  Results for a product search request.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults : GTLRObject
+
+/**
+ *  Timestamp of the index which provided these results. Changes made after
+ *  this time are not reflected in the current results.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
+
+/**
+ *  List of results grouped by products detected in the query image. Each entry
+ *  corresponds to one bounding polygon in the query image, and contains the
+ *  matching products specific to that region. There may be duplicate product
+ *  matches in the union of all the per-product results.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about the products similar to a single product in a query
+ *  image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult : GTLRObject
+
+/** The bounding polygon around the product detected in the query image. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult *> *results;
+
+@end
+
+
+/**
+ *  Information about a product.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult : GTLRObject
+
+/**
+ *  The resource name of the image from the product that is the closest match
+ *  to the query.
+ */
+@property(nonatomic, copy, nullable) NSString *image;
+
+/** The Product. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1Product *product;
+
+/**
+ *  A confidence level on the match, ranging from 0 (no confidence) to
+ *  1 (full confidence).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
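+
+// Editor's note: a minimal sketch, not part of the generated header and kept
+// behind `#if 0` so the header still compiles. It walks the grouped product
+// search results, one group per detected bounding polygon; the function name
+// is hypothetical.
+#if 0
+static void ExampleLogProductMatches(
+    GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults *searchResults) {
+  for (GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult
+           *group in searchResults.productGroupedResults) {
+    for (GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult
+             *result in group.results) {
+      NSLog(@"%@ matched with score %@",
+            result.product.displayName, result.score);
+    }
+  }
+}
+#endif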
+
+
+/**
+ *  A `Property` consists of a user-supplied name/value pair.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Property : GTLRObject
+
+/** Name of the property. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Value of numeric properties.
+ *
+ *  Uses NSNumber of unsignedLongLongValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *uint64Value;
+
+/** Value of the property. */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  A `ReferenceImage` represents a product image and its associated metadata,
+ *  such as bounding boxes.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage : GTLRObject
+
+/**
+ *  Bounding polygons around the areas of interest in the reference image.
+ *  Optional. If this field is empty, the system will try to detect regions of
+ *  interest. At most 10 bounding polygons will be used.
+ *  The provided shape is converted into a non-rotated rectangle. Once
+ *  converted, the small edge of the rectangle must be greater than or equal
+ *  to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
+ *  is not).
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *> *boundingPolys;
+
+/**
+ *  The resource name of the reference image.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
+ *  This field is ignored when creating a reference image.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The Google Cloud Storage URI of the reference image.
+ *  The URI must start with `gs://`.
+ *  Required.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Set of features pertaining to the image, computed by computer vision
+ *  methods over safe-search verticals (for example, adult, spoof, medical,
+ *  violence).
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation : GTLRObject
+
+/**
+ *  Represents the adult content likelihood for the image. Adult content may
+ *  contain elements such as nudity, pornographic images or cartoons, or
+ *  sexual activities.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *adult;
+
+/**
+ *  Likelihood that this is a medical image.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *medical;
+
+/**
+ *  Likelihood that the request image contains racy content. Racy content may
+ *  include (but is not limited to) skimpy or sheer clothing, strategically
+ *  covered nudity, lewd or provocative poses, or close-ups of sensitive
+ *  body areas.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *racy;
+
+/**
+ *  Spoof likelihood. The likelihood that a modification
+ *  was made to the image's canonical version to make it appear
+ *  funny or offensive.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *spoof;
+
+/**
+ *  Likelihood that this image contains violent content.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Likely
+ *        It is likely that the image belongs to the specified vertical. (Value:
+ *        "LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Possible
+ *        It is possible that the image belongs to the specified vertical.
+ *        (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unknown
+ *        Unknown likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unlikely
+ *        It is unlikely that the image belongs to the specified vertical.
+ *        (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryLikely
+ *        It is very likely that the image belongs to the specified vertical.
+ *        (Value: "VERY_LIKELY")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryUnlikely
+ *        It is very unlikely that the image belongs to the specified vertical.
+ *        (Value: "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *violence;
+
+@end
+
+
+/**
+ *  A single symbol representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Symbol : GTLRObject
+
+/**
+ *  The bounding box for the symbol.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the symbol. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the symbol. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
+
+/** The actual UTF-8 representation of the symbol. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
+
+
+/**
+ *  TextAnnotation contains a structured representation of OCR extracted text.
+ *  The hierarchy of an OCR extracted text structure is like this:
+ *  TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
+ *  Each structural component, starting from Page, may further have its own
+ *  properties. Properties describe detected languages, breaks, etc. Please
+ *  refer to the TextAnnotation.TextProperty message definition below for
+ *  more detail.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation : GTLRObject
+
+/** List of pages detected by OCR. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Page *> *pages;
+
+/** UTF-8 text detected on the pages. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
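+
+// Editor's note: a minimal sketch, not part of the generated header and kept
+// behind `#if 0` so the header still compiles. It walks the OCR hierarchy
+// (Page -> Block -> Paragraph -> Word -> Symbol) to rebuild each word's text;
+// the function name is hypothetical.
+#if 0
+static void ExampleLogWords(
+    GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation *annotation) {
+  for (GTLRVision_GoogleCloudVisionV1p3beta1Page *page in annotation.pages) {
+    for (GTLRVision_GoogleCloudVisionV1p3beta1Block *block in page.blocks) {
+      for (GTLRVision_GoogleCloudVisionV1p3beta1Paragraph *paragraph in block.paragraphs) {
+        for (GTLRVision_GoogleCloudVisionV1p3beta1Word *word in paragraph.words) {
+          NSMutableString *text = [NSMutableString string];
+          for (GTLRVision_GoogleCloudVisionV1p3beta1Symbol *symbol in word.symbols) {
+            [text appendString:symbol.text ?: @""];
+          }
+          NSLog(@"word: %@ (confidence %@)", text, word.confidence);
+        }
+      }
+    }
+  }
+}
+#endif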
+
+
+/**
+ *  Detected start or end of a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak : GTLRObject
+
+/**
+ *  True if the break precedes the element.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *isPrefix;
+
+/**
+ *  Detected break type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_EolSureSpace
+ *        Line-wrapping break. (Value: "EOL_SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Hyphen
+ *        End-line hyphen that is not present in text; does not co-occur with
+ *        `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_LineBreak
+ *        Line break that ends a paragraph. (Value: "LINE_BREAK")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Space
+ *        Regular space. (Value: "SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_SureSpace
+ *        Sure space (very wide). (Value: "SURE_SPACE")
+ *    @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Unknown
+ *        Unknown break label type. (Value: "UNKNOWN")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  Detected language for a structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage : GTLRObject
+
+/**
+ *  Confidence of detected language. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Additional information detected on the structural component.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty : GTLRObject
+
+/** Detected start or end of a text segment. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak *detectedBreak;
+
+/** A list of detected languages together with confidence. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the vertex coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Vertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
+
+
+/**
+ *  Relevant information for the image from the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetection : GTLRObject
+
+/**
+ *  The service's best guess as to the topic of the request image.
+ *  Inferred from similar images on the open web.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel *> *bestGuessLabels;
+
+/**
+ *  Fully matching images from the Internet.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Web pages containing the matching images from the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage *> *pagesWithMatchingImages;
+
+/**
+ *  Partial matching images from the Internet.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/** The visually similar image results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *visuallySimilarImages;
+
+/** Deduced entities from similar images on the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity *> *webEntities;
+
+@end
+
+
+/**
+ *  Entity deduced from similar images on the Internet.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity : GTLRObject
+
+/**
+ *  Canonical description of the entity, in English.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/** Opaque entity ID. */
+@property(nonatomic, copy, nullable) NSString *entityId;
+
+/**
+ *  Overall relevancy score for the entity.
+ *  Not normalized and not comparable across different image queries.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Metadata for online images.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage : GTLRObject
+
+/**
+ *  (Deprecated) Overall relevancy score for the image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result image URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  Label to provide extra metadata for the web detection.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel : GTLRObject
+
+/** Label for extra metadata. */
+@property(nonatomic, copy, nullable) NSString *label;
+
+/**
+ *  The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
+ *  For more information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Metadata for web pages.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage : GTLRObject
+
+/**
+ *  Fully matching images on the page.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *fullMatchingImages;
+
+/** Title for the web page; may contain HTML markup. */
+@property(nonatomic, copy, nullable) NSString *pageTitle;
+
+/**
+ *  Partial matching images on the page.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *partialMatchingImages;
+
+/**
+ *  (Deprecated) Overall relevancy score for the web page.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result web page URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  A word representation.
+ */
+@interface GTLRVision_GoogleCloudVisionV1p3beta1Word : GTLRObject
+
+/**
+ *  The bounding box for the word.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the word. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the word. */
+@property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
+
+/**
+ *  List of symbols in the word.
+ *  The order of the symbols follows the natural reading order.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Symbol *> *symbols;
+
+@end
+
+
+/**
+ *  Information about the products similar to a single product in a query
+ *  image.
+ */
+@interface GTLRVision_GroupedResult : GTLRObject
+
+/** The bounding polygon around the product detected in the query image. */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Result *> *results;
+
+@end
+
+
+/**
+ *  Client image to perform Google Cloud Vision API tasks over.
+ */
+@interface GTLRVision_Image : GTLRObject
+
+/**
+ *  Image content, represented as a stream of bytes.
+ *  Note: As with all `bytes` fields, protocol buffers use a pure binary
+ *  representation, whereas JSON representations use base64.
+ *
+ *  Contains encoded binary data; GTLRBase64 can encode/decode (probably
+ *  web-safe format).
+ */
+@property(nonatomic, copy, nullable) NSString *content;
+
+/**
+ *  Google Cloud Storage image location, or publicly-accessible image
+ *  URL. If both `content` and `source` are provided for an image, `content`
+ *  takes precedence and is used to perform the image annotation request.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_ImageSource *source;
+
+@end
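+
+// Illustrative sketch (not generated code): populating a request image with
+// inline bytes. Assumes the GTLREncodeBase64()/GTLREncodeWebSafeBase64()
+// helpers from GTLRBase64.h; `imageData` and the file path are hypothetical.
+//
+//   NSData *imageData = [NSData dataWithContentsOfFile:@"photo.jpg"];
+//   GTLRVision_Image *image = [GTLRVision_Image object];
+//   image.content = GTLREncodeBase64(imageData);  // base64 on the JSON wire
+//   // If both `content` and `source` were set, `content` would take precedence.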
+
+
+/**
+ *  If an image was produced from a file (e.g. a PDF), this message gives
+ *  information about the source of that image.
+ */
+@interface GTLRVision_ImageAnnotationContext : GTLRObject
+
+/**
+ *  If the file was a PDF or TIFF, this field gives the page number within
+ *  the file used to produce the image.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pageNumber;
+
+/** The URI of the file used to produce the image. */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
+
+
+/**
+ *  Image context and/or feature-specific parameters.
+ */
+@interface GTLRVision_ImageContext : GTLRObject
+
+/** Parameters for crop hints annotation request. */
+@property(nonatomic, strong, nullable) GTLRVision_CropHintsParams *cropHintsParams;
+
+/**
+ *  List of languages to use for TEXT_DETECTION. In most cases, an empty value
+ *  yields the best results since it enables automatic language detection. For
+ *  languages based on the Latin alphabet, setting `language_hints` is not
+ *  needed. In rare cases, when the language of the text in the image is known,
+ *  setting a hint will help get better results (although it will be a
+ *  significant hindrance if the hint is wrong). Text detection returns an
+ *  error if one or more of the specified languages is not one of the
+ *  [supported languages](/vision/docs/languages).
+ */
+@property(nonatomic, strong, nullable) NSArray<NSString *> *languageHints;
+
+/** Not used. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLongRect *latLongRect;
+
+/** Parameters for product search. */
+@property(nonatomic, strong, nullable) GTLRVision_ProductSearchParams *productSearchParams;
+
+/** Parameters for web detection. */
+@property(nonatomic, strong, nullable) GTLRVision_WebDetectionParams *webDetectionParams;
+
+@end
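+
+// Illustrative sketch (not generated code): supplying a language hint for
+// TEXT_DETECTION. Leave `languageHints` empty for automatic detection; a
+// wrong hint can significantly hurt results.
+//
+//   GTLRVision_ImageContext *context = [GTLRVision_ImageContext object];
+//   context.languageHints = @[ @"ja" ];  // only when the text language is known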
+
+
+/**
+ *  Stores image properties, such as dominant colors.
+ */
+@interface GTLRVision_ImageProperties : GTLRObject
+
+/** If present, dominant colors completed successfully. */
+@property(nonatomic, strong, nullable) GTLRVision_DominantColorsAnnotation *dominantColors;
+
+@end
+
+
+/**
+ *  External image source (Google Cloud Storage or web URL image location).
+ */
+@interface GTLRVision_ImageSource : GTLRObject
+
+/**
+ *  **Use `image_uri` instead.**
+ *  The Google Cloud Storage URI of the form
+ *  `gs://bucket_name/object_name`. Object versioning is not supported. See
+ *  [Google Cloud Storage Request
+ *  URIs](https://cloud.google.com/storage/docs/reference-uris) for more info.
+ */
+@property(nonatomic, copy, nullable) NSString *gcsImageUri;
+
+/**
+ *  The URI of the source image. Can be either:
+ *  1. A Google Cloud Storage URI of the form
+ *  `gs://bucket_name/object_name`. Object versioning is not supported. See
+ *  [Google Cloud Storage Request
+ *  URIs](https://cloud.google.com/storage/docs/reference-uris) for more
+ *  info.
+ *  2. A publicly-accessible image HTTP/HTTPS URL. When fetching images from
+ *  HTTP/HTTPS URLs, Google cannot guarantee that the request will be
+ *  completed. Your request may fail if the specified host denies the
+ *  request (e.g. due to request throttling or DOS prevention), or if Google
+ *  throttles requests to the site for abuse prevention. You should not
+ *  depend on externally-hosted images for production applications.
+ *  When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes
+ *  precedence.
+ */
+@property(nonatomic, copy, nullable) NSString *imageUri;
+
+@end
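+
+// Illustrative sketch (not generated code): referencing an image by URI. The
+// bucket and object names are hypothetical. `imageUri` is preferred over the
+// older `gcsImageUri`, and wins when both are set.
+//
+//   GTLRVision_ImageSource *source = [GTLRVision_ImageSource object];
+//   source.imageUri = @"gs://my-bucket/photo.jpg";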
+
+
+/**
+ *  The Google Cloud Storage location for a csv file which preserves a list of
+ *  ImportProductSetRequests in each line.
+ */
+@interface GTLRVision_ImportProductSetsGcsSource : GTLRObject
+
+/**
+ *  The Google Cloud Storage URI of the input csv file.
+ *  The URI must start with `gs://`.
+ *  The format of the input csv file should be one image per line.
+ *  In each line, there are 8 columns.
+ *  1. image-uri
+ *  2. image-id
+ *  3. product-set-id
+ *  4. product-id
+ *  5. product-category
+ *  6. product-display-name
+ *  7. labels
+ *  8. bounding-poly
+ *  The `image-uri`, `product-set-id`, `product-id`, and `product-category`
+ *  columns are required. All other columns are optional.
+ *  If the `ProductSet` or `Product` specified by the `product-set-id` and
+ *  `product-id` values does not exist, then the system will create a new
+ *  `ProductSet` or `Product` for the image. In this case, the
+ *  `product-display-name` column refers to display_name, the
+ *  `product-category` column refers to product_category, and the
+ *  `labels` column refers to product_labels.
+ *  The `image-id` column is optional but must be unique if provided. If it is
+ *  empty, the system will automatically assign a unique id to the image.
+ *  The `product-display-name` column is optional. If it is empty, the system
+ *  sets the display_name field for the product to a
+ *  space (" "). You can update the `display_name` later by using the API.
+ *  If a `Product` with the specified `product-id` already exists, then the
+ *  system ignores the `product-display-name`, `product-category`, and `labels`
+ *  columns.
+ *  The `labels` column (optional) is a line containing a list of
+ *  comma-separated key-value pairs, in the following format:
+ *  "key_1=value_1,key_2=value_2,...,key_n=value_n"
+ *  The `bounding-poly` column (optional) identifies one region of
+ *  interest from the image in the same manner as `CreateReferenceImage`. If
+ *  you do not specify the `bounding-poly` column, then the system will try to
+ *  detect regions of interest automatically.
+ *  At most one `bounding-poly` column is allowed per line. If the image
+ *  contains multiple regions of interest, add a line to the CSV file that
+ *  includes the same product information, and the `bounding-poly` values for
+ *  each region of interest.
+ *  The `bounding-poly` column must contain an even number of comma-separated
+ *  numbers, in the format "p1_x,p1_y,p2_x,p2_y,...,pn_x,pn_y". Use
+ *  non-negative integers for absolute bounding polygons, and float values
+ *  in [0, 1] for normalized bounding polygons.
+ *  The system will resize the image if the image resolution is too
+ *  large to process (larger than 20MP).
+ */
+@property(nonatomic, copy, nullable) NSString *csvFileUri;
+
+@end
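+
+// An illustrative CSV line for the format described above (all values are
+// hypothetical). The eight columns are image-uri, image-id, product-set-id,
+// product-id, product-category, product-display-name, labels, bounding-poly;
+// here the bounding polygon uses normalized floats in [0, 1]:
+//
+//   gs://my-bucket/shoe.jpg,img-001,set-001,prod-001,apparel,"Red sneaker","color=red,size=9","0.1,0.1,0.9,0.1,0.9,0.9,0.1,0.9"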
+
+
+/**
+ *  The input content for the `ImportProductSets` method.
+ */
+@interface GTLRVision_ImportProductSetsInputConfig : GTLRObject
+
+/**
+ *  The Google Cloud Storage location for a csv file which preserves a list
+ *  of ImportProductSetRequests in each line.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_ImportProductSetsGcsSource *gcsSource;
+
+@end
+
+
+/**
+ *  Request message for the `ImportProductSets` method.
+ */
+@interface GTLRVision_ImportProductSetsRequest : GTLRObject
+
+/** The input content for the list of requests. */
+@property(nonatomic, strong, nullable) GTLRVision_ImportProductSetsInputConfig *inputConfig;
+
+@end
+
+
+/**
+ *  Response message for the `ImportProductSets` method.
+ *  This message is returned by the
+ *  google.longrunning.Operations.GetOperation method in the returned
+ *  google.longrunning.Operation.response field.
+ */
+@interface GTLRVision_ImportProductSetsResponse : GTLRObject
+
+/** The list of reference_images that are imported successfully. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_ReferenceImage *> *referenceImages;
+
+/**
+ *  The rpc status for each ImportProductSet request, including both successes
+ *  and errors.
+ *  The number of statuses here matches the number of lines in the csv file,
+ *  and statuses[i] stores the success or failure status of processing the i-th
+ *  line of the csv, starting from line 0.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Status *> *statuses;
+
+@end
+
+
+/**
+ *  The desired input location and metadata.
+ */
+@interface GTLRVision_InputConfig : GTLRObject
+
+/** The Google Cloud Storage location to read the input from. */
+@property(nonatomic, strong, nullable) GTLRVision_GcsSource *gcsSource;
+
+/**
+ *  The type of the file. Currently only "application/pdf" and "image/tiff"
+ *  are supported. Wildcards are not supported.
+ */
+@property(nonatomic, copy, nullable) NSString *mimeType;
+
+@end
+
+
+/**
+ *  A product label represented as a key-value pair.
+ */
+@interface GTLRVision_KeyValue : GTLRObject
+
+/**
+ *  The key of the label attached to the product. Cannot be empty and cannot
+ *  exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *key;
+
+/**
+ *  The value of the label attached to the product. Cannot be empty and
+ *  cannot exceed 128 bytes.
+ */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  A face-specific landmark (for example, a face feature).
+ */
+@interface GTLRVision_Landmark : GTLRObject
+
+/** Face landmark position. */
+@property(nonatomic, strong, nullable) GTLRVision_Position *position;
+
+/**
+ *  Face landmark type.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_Landmark_Type_ChinGnathion Chin gnathion. (Value:
+ *        "CHIN_GNATHION")
+ *    @arg @c kGTLRVision_Landmark_Type_ChinLeftGonion Chin left gonion. (Value:
+ *        "CHIN_LEFT_GONION")
+ *    @arg @c kGTLRVision_Landmark_Type_ChinRightGonion Chin right gonion.
+ *        (Value: "CHIN_RIGHT_GONION")
+ *    @arg @c kGTLRVision_Landmark_Type_ForeheadGlabella Forehead glabella.
+ *        (Value: "FOREHEAD_GLABELLA")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEarTragion Left ear tragion. (Value:
+ *        "LEFT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEye Left eye. (Value: "LEFT_EYE")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyeBottomBoundary Left eye, bottom
+ *        boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyebrowUpperMidpoint Left eyebrow,
+ *        upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyeLeftCorner Left eye, left corner.
+ *        (Value: "LEFT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyePupil Left eye pupil. (Value:
+ *        "LEFT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyeRightCorner Left eye, right
+ *        corner. (Value: "LEFT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftEyeTopBoundary Left eye, top
+ *        boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftOfLeftEyebrow Left of left eyebrow.
+ *        (Value: "LEFT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_Landmark_Type_LeftOfRightEyebrow Left of right
+ *        eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_Landmark_Type_LowerLip Lower lip. (Value: "LOWER_LIP")
+ *    @arg @c kGTLRVision_Landmark_Type_MidpointBetweenEyes Midpoint between
+ *        eyes. (Value: "MIDPOINT_BETWEEN_EYES")
+ *    @arg @c kGTLRVision_Landmark_Type_MouthCenter Mouth center. (Value:
+ *        "MOUTH_CENTER")
+ *    @arg @c kGTLRVision_Landmark_Type_MouthLeft Mouth left. (Value:
+ *        "MOUTH_LEFT")
+ *    @arg @c kGTLRVision_Landmark_Type_MouthRight Mouth right. (Value:
+ *        "MOUTH_RIGHT")
+ *    @arg @c kGTLRVision_Landmark_Type_NoseBottomCenter Nose, bottom center.
+ *        (Value: "NOSE_BOTTOM_CENTER")
+ *    @arg @c kGTLRVision_Landmark_Type_NoseBottomLeft Nose, bottom left.
+ *        (Value: "NOSE_BOTTOM_LEFT")
+ *    @arg @c kGTLRVision_Landmark_Type_NoseBottomRight Nose, bottom right.
+ *        (Value: "NOSE_BOTTOM_RIGHT")
+ *    @arg @c kGTLRVision_Landmark_Type_NoseTip Nose tip. (Value: "NOSE_TIP")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEarTragion Right ear tragion.
+ *        (Value: "RIGHT_EAR_TRAGION")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEye Right eye. (Value: "RIGHT_EYE")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyeBottomBoundary Right eye, bottom
+ *        boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyebrowUpperMidpoint Right eyebrow,
+ *        upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyeLeftCorner Right eye, left
+ *        corner. (Value: "RIGHT_EYE_LEFT_CORNER")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyePupil Right eye pupil. (Value:
+ *        "RIGHT_EYE_PUPIL")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyeRightCorner Right eye, right
+ *        corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
+ *    @arg @c kGTLRVision_Landmark_Type_RightEyeTopBoundary Right eye, top
+ *        boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
+ *    @arg @c kGTLRVision_Landmark_Type_RightOfLeftEyebrow Right of left
+ *        eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
+ *    @arg @c kGTLRVision_Landmark_Type_RightOfRightEyebrow Right of right
+ *        eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
+ *    @arg @c kGTLRVision_Landmark_Type_UnknownLandmark Unknown face landmark
+ *        detected. Should not be filled. (Value: "UNKNOWN_LANDMARK")
+ *    @arg @c kGTLRVision_Landmark_Type_UpperLip Upper lip. (Value: "UPPER_LIP")
+ */
+@property(nonatomic, copy, nullable) NSString *type;
+
+@end
+
+
+/**
+ *  An object representing a latitude/longitude pair. This is expressed as a
+ *  pair of doubles representing degrees latitude and degrees longitude. Unless
+ *  specified otherwise, this must conform to the
+ *  <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
+ *  standard</a>. Values must be within normalized ranges.
+ */
+@interface GTLRVision_LatLng : GTLRObject
+
+/**
+ *  The latitude in degrees. It must be in the range [-90.0, +90.0].
+ *
+ *  Uses NSNumber of doubleValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *latitude;
+
+/**
+ *  The longitude in degrees. It must be in the range [-180.0, +180.0].
+ *
+ *  Uses NSNumber of doubleValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *longitude;
+
+@end
+
+
+/**
+ *  Rectangle determined by min and max `LatLng` pairs.
+ */
+@interface GTLRVision_LatLongRect : GTLRObject
+
+/** Max lat/long pair. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *maxLatLng;
+
+/** Min lat/long pair. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *minLatLng;
+
+@end
+
+
+/**
+ *  The response message for Operations.ListOperations.
+ *
+ *  @note This class supports NSFastEnumeration and indexed subscripting over
+ *        its "operations" property. If returned as the result of a query, it
+ *        should support automatic pagination (when @c shouldFetchNextPages is
+ *        enabled).
+ */
+@interface GTLRVision_ListOperationsResponse : GTLRCollectionObject
+
+/** The standard List next-page token. */
+@property(nonatomic, copy, nullable) NSString *nextPageToken;
+
+/**
+ *  A list of operations that matches the specified filter in the request.
+ *
+ *  @note This property is used to support NSFastEnumeration and indexed
+ *        subscripting on this class.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Operation *> *operations;
+
+@end
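+
+// Illustrative sketch (not generated code): because this is a
+// GTLRCollectionObject, the library can page through results automatically.
+// Assumes the generated GTLRVisionService from this framework and the
+// GTLRService `shouldFetchNextPages` option.
+//
+//   GTLRVisionService *service = [[GTLRVisionService alloc] init];
+//   service.shouldFetchNextPages = YES;  // merge all pages into one response
+//   // Executing a list query now yields a single response whose "operations"
+//   // array spans every page; it also supports for-in enumeration.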
+
+
+/**
+ *  Response message for the `ListProductSets` method.
+ *
+ *  @note This class supports NSFastEnumeration and indexed subscripting over
+ *        its "productSets" property. If returned as the result of a query, it
+ *        should support automatic pagination (when @c shouldFetchNextPages is
+ *        enabled).
+ */
+@interface GTLRVision_ListProductSetsResponse : GTLRCollectionObject
+
+/**
+ *  Token to retrieve the next page of results, or empty if there are no more
+ *  results in the list.
+ */
+@property(nonatomic, copy, nullable) NSString *nextPageToken;
+
+/**
+ *  List of ProductSets.
+ *
+ *  @note This property is used to support NSFastEnumeration and indexed
+ *        subscripting on this class.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_ProductSet *> *productSets;
+
+@end
+
+
+/**
+ *  Response message for the `ListProductsInProductSet` method.
+ *
+ *  @note This class supports NSFastEnumeration and indexed subscripting over
+ *        its "products" property. If returned as the result of a query, it
+ *        should support automatic pagination (when @c shouldFetchNextPages is
+ *        enabled).
+ */
+@interface GTLRVision_ListProductsInProductSetResponse : GTLRCollectionObject
+
+/**
+ *  Token to retrieve the next page of results, or empty if there are no more
+ *  results in the list.
+ */
+@property(nonatomic, copy, nullable) NSString *nextPageToken;
+
+/**
+ *  The list of Products.
+ *
+ *  @note This property is used to support NSFastEnumeration and indexed
+ *        subscripting on this class.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Product *> *products;
+
+@end
+
+
+/**
+ *  Response message for the `ListProducts` method.
+ *
+ *  @note This class supports NSFastEnumeration and indexed subscripting over
+ *        its "products" property. If returned as the result of a query, it
+ *        should support automatic pagination (when @c shouldFetchNextPages is
+ *        enabled).
+ */
+@interface GTLRVision_ListProductsResponse : GTLRCollectionObject
+
+/**
+ *  Token to retrieve the next page of results, or empty if there are no more
+ *  results in the list.
+ */
+@property(nonatomic, copy, nullable) NSString *nextPageToken;
+
+/**
+ *  List of products.
+ *
+ *  @note This property is used to support NSFastEnumeration and indexed
+ *        subscripting on this class.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Product *> *products;
+
+@end
+
+
+/**
+ *  Response message for the `ListReferenceImages` method.
+ *
+ *  @note This class supports NSFastEnumeration and indexed subscripting over
+ *        its "referenceImages" property. If returned as the result of a query,
+ *        it should support automatic pagination (when @c shouldFetchNextPages
+ *        is enabled).
+ */
+@interface GTLRVision_ListReferenceImagesResponse : GTLRCollectionObject
+
+/** The next_page_token returned from a previous List request, if any. */
+@property(nonatomic, copy, nullable) NSString *nextPageToken;
+
+/**
+ *  The maximum number of items to return. Default 10, maximum 100.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *pageSize;
+
+/**
+ *  The list of reference images.
+ *
+ *  @note This property is used to support NSFastEnumeration and indexed
+ *        subscripting on this class.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_ReferenceImage *> *referenceImages;
+
+@end
+
+
+/**
+ *  Set of detected objects with bounding boxes.
+ */
+@interface GTLRVision_LocalizedObjectAnnotation : GTLRObject
+
+/** Image region to which this object belongs. This must be populated. */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/**
+ *  The BCP-47 language code, such as "en-US" or "sr-Latn". For more
+ *  information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+/** Object ID that should align with EntityAnnotation mid. */
+@property(nonatomic, copy, nullable) NSString *mid;
+
+/** Object name, expressed in its `language_code` language. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Score of the result. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Detected entity location information.
+ */
+@interface GTLRVision_LocationInfo : GTLRObject
+
+/** lat/long location coordinates. */
+@property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the normalized vertex coordinates are relative to the original image
+ *  and range from 0 to 1.
+ */
+@interface GTLRVision_NormalizedVertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
+
+
+/**
+ *  This resource represents a long-running operation that is the result of a
+ *  network API call.
+ */
+@interface GTLRVision_Operation : GTLRObject
+
+/**
+ *  If the value is `false`, it means the operation is still in progress.
+ *  If `true`, the operation is completed, and either `error` or `response` is
+ *  available.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *done;
+
+/** The error result of the operation in case of failure or cancellation. */
+@property(nonatomic, strong, nullable) GTLRVision_Status *error;
+
+/**
+ *  Service-specific metadata associated with the operation. It typically
+ *  contains progress information and common metadata such as create time.
+ *  Some services might not provide such metadata. Any method that returns a
+ *  long-running operation should document the metadata type, if any.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Operation_Metadata *metadata;
+
+/**
+ *  The server-assigned name, which is only unique within the same service that
+ *  originally returns it. If you use the default HTTP mapping, the
+ *  `name` should have the format of `operations/some/unique/name`.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The normal response of the operation in case of success. If the original
+ *  method returns no data on success, such as `Delete`, the response is
+ *  `google.protobuf.Empty`. If the original method is standard
+ *  `Get`/`Create`/`Update`, the response should be the resource. For other
+ *  methods, the response should have the type `XxxResponse`, where `Xxx`
+ *  is the original method name. For example, if the original method name
+ *  is `TakeSnapshot()`, the inferred response type is
+ *  `TakeSnapshotResponse`.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Operation_Response *response;
+
+@end
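+
+// Illustrative sketch (not generated code): inspecting a long-running
+// operation once fetched. `op` is a hypothetical GTLRVision_Operation.
+//
+//   if (op.done.boolValue) {
+//     if (op.error) {
+//       NSLog(@"Operation failed: %@", op.error.message);
+//     } else {
+//       // op.response carries the method-specific XxxResponse payload.
+//     }
+//   }  // else: still in progress; poll again later.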
+
+
+/**
+ *  Service-specific metadata associated with the operation. It typically
+ *  contains progress information and common metadata such as create time.
+ *  Some services might not provide such metadata. Any method that returns a
+ *  long-running operation should document the metadata type, if any.
+ *
+ *  @note This class is documented as having more properties of any valid JSON
+ *        type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
+ *        get the list of properties and then fetch them; or @c
+ *        -additionalProperties to fetch them all at once.
+ */
+@interface GTLRVision_Operation_Metadata : GTLRObject
+@end
+
+
+/**
+ *  The normal response of the operation in case of success. If the original
+ *  method returns no data on success, such as `Delete`, the response is
+ *  `google.protobuf.Empty`. If the original method is standard
+ *  `Get`/`Create`/`Update`, the response should be the resource. For other
+ *  methods, the response should have the type `XxxResponse`, where `Xxx`
+ *  is the original method name. For example, if the original method name
+ *  is `TakeSnapshot()`, the inferred response type is
+ *  `TakeSnapshotResponse`.
+ *
+ *  @note This class is documented as having more properties of any valid JSON
+ *        type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
+ *        get the list of properties and then fetch them; or @c
+ *        -additionalProperties to fetch them all at once.
+ */
+@interface GTLRVision_Operation_Response : GTLRObject
+@end
+
+
+/**
+ *  Contains metadata for the BatchAnnotateImages operation.
+ */
+@interface GTLRVision_OperationMetadata : GTLRObject
+
+/** The time when the batch request was received. */
+@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
+
+/**
+ *  Current state of the batch operation.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_OperationMetadata_State_Cancelled The batch processing
+ *        was cancelled. (Value: "CANCELLED")
+ *    @arg @c kGTLRVision_OperationMetadata_State_Created Request is received.
+ *        (Value: "CREATED")
+ *    @arg @c kGTLRVision_OperationMetadata_State_Done The batch processing is
+ *        done. (Value: "DONE")
+ *    @arg @c kGTLRVision_OperationMetadata_State_Running Request is actively
+ *        being processed. (Value: "RUNNING")
+ *    @arg @c kGTLRVision_OperationMetadata_State_StateUnspecified Invalid.
+ *        (Value: "STATE_UNSPECIFIED")
+ */
+@property(nonatomic, copy, nullable) NSString *state;
+
+/** The time when the operation result was last updated. */
+@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
+
+@end
+
+
+/**
+ *  The desired output location and metadata.
+ */
+@interface GTLRVision_OutputConfig : GTLRObject
+
+/**
+ *  The max number of response protos to put into each output JSON file on
+ *  Google Cloud Storage.
+ *  The valid range is [1, 100]. If not specified, the default value is 20.
+ *  For example, for one pdf file with 100 pages, 100 response protos will
+ *  be generated. If `batch_size` = 20, then 5 json files each
+ *  containing 20 response protos will be written under the prefix
+ *  `gcs_destination`.`uri`.
+ *  Currently, batch_size only applies to GcsDestination, with potential future
+ *  support for other output configurations.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *batchSize;
+
+/** The Google Cloud Storage location to write the output(s) to. */
+@property(nonatomic, strong, nullable) GTLRVision_GcsDestination *gcsDestination;
+
+@end
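+
+// Illustrative sketch (not generated code): writing async results to Cloud
+// Storage in batches. The bucket URI is hypothetical;
+// GTLRVision_GcsDestination is declared elsewhere in this header.
+//
+//   GTLRVision_OutputConfig *output = [GTLRVision_OutputConfig object];
+//   GTLRVision_GcsDestination *destination = [GTLRVision_GcsDestination object];
+//   destination.uri = @"gs://my-bucket/ocr-output/";
+//   output.gcsDestination = destination;
+//   output.batchSize = @20;  // a 100-page PDF then yields 100/20 = 5 JSON files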
+
+
+/**
+ *  Detected page from OCR.
+ */
+@interface GTLRVision_Page : GTLRObject
+
+/** List of blocks of text, images etc on this page. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Block *> *blocks;
+
+/**
+ *  Confidence of the OCR results on the page. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/**
+ *  Page height. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *height;
+
+/** Additional information detected on the page. */
+@property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
+
+/**
+ *  Page width. For PDFs the unit is points. For images (including
+ *  TIFFs) the unit is pixels.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *width;
+
+@end
+
+
+/**
+ *  Structural unit of text representing a number of words in certain order.
+ */
+@interface GTLRVision_Paragraph : GTLRObject
+
+/**
+ *  The bounding box for the paragraph.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the paragraph. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the paragraph. */
+@property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
+
+/** List of words in this paragraph. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Word *> *words;
+
+@end
+
+
+/**
+ *  A 3D position in the image, used primarily for Face detection landmarks.
+ *  A valid Position must have both x and y coordinates.
+ *  The position coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_Position : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+/**
+ *  Z coordinate (or depth).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *z;
+
+@end
+
+
+/**
+ *  A Product contains ReferenceImages.
+ */
+@interface GTLRVision_Product : GTLRObject
+
+/**
+ *  User-provided metadata to be stored with this product. Must be at most 4096
+ *  characters long.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/**
+ *  The user-provided name for this Product. Must not be empty. Must be at most
+ *  4096 characters long.
+ */
+@property(nonatomic, copy, nullable) NSString *displayName;
+
+/**
+ *  The resource name of the product.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *  This field is ignored when creating a product.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The category for the product identified by the reference image. This should
+ *  be either "homegoods", "apparel", or "toys".
+ *  This field is immutable.
+ */
+@property(nonatomic, copy, nullable) NSString *productCategory;
+
+/**
+ *  Key-value pairs that can be attached to a product. At query time,
+ *  constraints can be specified based on the product_labels.
+ *  Note that integer values can be provided as strings, e.g. "1199". Only
+ *  strings with integer values can match a range-based restriction, which
+ *  will be supported soon.
+ *  Multiple values can be assigned to the same key. One product may have up to
+ *  100 product_labels.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_KeyValue *> *productLabels;
+
+@end
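+
+// Illustrative sketch (not generated code): building a Product with one
+// label. All values are hypothetical; `name` is server-assigned and left
+// unset.
+//
+//   GTLRVision_Product *product = [GTLRVision_Product object];
+//   product.displayName = @"Red canvas sneaker";
+//   product.productCategory = @"apparel";  // immutable once created
+//   GTLRVision_KeyValue *label = [GTLRVision_KeyValue object];
+//   label.key = @"color";
+//   label.value = @"red";
+//   product.productLabels = @[ label ];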
+
+
+/**
+ *  Parameters for a product search request.
+ */
+@interface GTLRVision_ProductSearchParams : GTLRObject
+
+/**
+ *  The bounding polygon around the area of interest in the image.
+ *  Optional. If it is not specified, system discretion will be applied.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
+
+/**
+ *  The filtering expression. This can be used to restrict search results based
+ *  on Product labels. We currently support an AND of OR of key-value
+ *  expressions, where each expression within an OR must have the same key.
+ *  For example, "(color = red OR color = blue) AND brand = Google" is
+ *  acceptable, but not "(color = red OR brand = Google)" or "color: red".
+ */
+@property(nonatomic, copy, nullable) NSString *filter;
+
+/**
+ *  The list of product categories to search in. Currently, we only consider
+ *  the first category, and either "homegoods", "apparel", or "toys" should be
+ *  specified.
+ */
+@property(nonatomic, strong, nullable) NSArray<NSString *> *productCategories;
+
+/**
+ *  The resource name of a ProductSet to be searched for similar images.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *productSet;
+
+@end
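+
+// Illustrative sketch (not generated code): a search restricted by labels,
+// reusing the filter grammar quoted above. Project and location IDs are
+// hypothetical.
+//
+//   GTLRVision_ProductSearchParams *params = [GTLRVision_ProductSearchParams object];
+//   params.productSet = @"projects/my-project/locations/us-west1/productSets/my-set";
+//   params.productCategories = @[ @"apparel" ];  // only the first entry is used
+//   params.filter = @"(color = red OR color = blue) AND brand = Google";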
+
+
+/**
+ *  Results for a product search request.
+ */
+@interface GTLRVision_ProductSearchResults : GTLRObject
+
+/**
+ *  Timestamp of the index which provided these results. Changes made after
+ *  this time are not reflected in the current results.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
+
+/**
+ *  List of results grouped by products detected in the query image. Each entry
+ *  corresponds to one bounding polygon in the query image, and contains the
+ *  matching products specific to that region. There may be duplicate product
+ *  matches in the union of all the per-product results.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_GroupedResult *> *productGroupedResults;
+
+/** List of results, one for each product match. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Result *> *results;
+
+@end
+
+
+/**
+ *  A ProductSet contains Products. A ProductSet can contain a maximum of 1
+ *  million reference images. If the limit is exceeded, periodic indexing will
+ *  fail.
+ */
+@interface GTLRVision_ProductSet : GTLRObject
+
+/**
+ *  The user-provided name for this ProductSet. Must not be empty. Must be at
+ *  most 4096 characters long.
+ */
+@property(nonatomic, copy, nullable) NSString *displayName;
+
+/**
+ *  Output only. If there was an error with indexing the product set, the field
+ *  is populated.
+ *  This field is ignored when creating a ProductSet.
+ */
+@property(nonatomic, strong, nullable) GTLRVision_Status *indexError;
+
+/**
+ *  Output only. The time at which this ProductSet was last indexed. Query
+ *  results will reflect all updates before this time. If this ProductSet has
+ *  never been indexed, this timestamp is the default value
+ *  "1970-01-01T00:00:00Z".
+ *  This field is ignored when creating a ProductSet.
+ */
+@property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
+
+/**
+ *  The resource name of the ProductSet.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
+ *  This field is ignored when creating a ProductSet.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+@end
+
+
+/**
+ *  A `Property` consists of a user-supplied name/value pair.
+ */
+@interface GTLRVision_Property : GTLRObject
+
+/** Name of the property. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Value of numeric properties.
+ *
+ *  Uses NSNumber of unsignedLongLongValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *uint64Value;
+
+/** Value of the property. */
+@property(nonatomic, copy, nullable) NSString *value;
+
+@end
+
+
+/**
+ *  A `ReferenceImage` represents a product image and its associated metadata,
+ *  such as bounding boxes.
+ */
+@interface GTLRVision_ReferenceImage : GTLRObject
+
+/**
+ *  Bounding polygons around the areas of interest in the reference image.
+ *  Optional. If this field is empty, the system will try to detect regions of
+ *  interest. At most 10 bounding polygons will be used.
+ *  The provided shape is converted into a non-rotated rectangle. Once
+ *  converted, the small edge of the rectangle must be greater than or equal
+ *  to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
+ *  is not).
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_BoundingPoly *> *boundingPolys;
+
+/**
+ *  The resource name of the reference image.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
+ *  This field is ignored when creating a reference image.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The Google Cloud Storage URI of the reference image.
+ *  The URI must start with `gs://`.
+ *  Required.
+ */
+@property(nonatomic, copy, nullable) NSString *uri;
+
+@end
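+
+// Illustrative sketch (not generated code): pre-checking the converted
+// rectangle constraints quoted above (small edge >= 300 pixels, aspect ratio
+// at most 1:4). `w` and `h` are hypothetical pixel dimensions.
+//
+//   CGFloat w = 300, h = 1500;
+//   BOOL ok = MIN(w, h) >= 300 && MAX(w, h) <= 4 * MIN(w, h);
+//   // Here ok == NO: 1500 > 4 * 300, so the 1:4 aspect-ratio limit fails.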
+
+
+/**
+ *  Request message for the `RemoveProductFromProductSet` method.
+ */
+@interface GTLRVision_RemoveProductFromProductSetRequest : GTLRObject
+
+/**
+ *  The resource name for the Product to be removed from this ProductSet.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *product;
+
+@end
+
+
+/**
+ *  Information about a product.
+ */
+@interface GTLRVision_Result : GTLRObject
+
+/**
+ *  The resource name of the image from the product that is the closest match
+ *  to the query.
+ */
+@property(nonatomic, copy, nullable) NSString *image;
+
+/** The Product. */
+@property(nonatomic, strong, nullable) GTLRVision_Product *product;
+
+/**
+ *  A confidence level on the match, ranging from 0 (no confidence) to
+ *  1 (full confidence).
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Set of features pertaining to the image, computed by computer vision
+ *  methods over safe-search verticals (for example, adult, spoof, medical,
+ *  violence).
+ */
+@interface GTLRVision_SafeSearchAnnotation : GTLRObject
+
+/**
+ *  Represents the adult content likelihood for the image. Adult content may
+ *  contain elements such as nudity, pornographic images or cartoons, or
+ *  sexual activities.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Likely It is likely that
+ *        the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Unknown Unknown likelihood.
+ *        (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Adult_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *adult;
+
+/**
+ *  Likelihood that this is a medical image.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Likely It is likely that
+ *        the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Medical_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *medical;
+
+/**
+ *  Likelihood that the request image contains racy content. Racy content may
+ *  include (but is not limited to) skimpy or sheer clothing, strategically
+ *  covered nudity, lewd or provocative poses, or close-ups of sensitive
+ *  body areas.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Likely It is likely that the
+ *        image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Possible It is possible that
+ *        the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Unknown Unknown likelihood.
+ *        (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Unlikely It is unlikely that
+ *        the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_VeryLikely It is very likely
+ *        that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Racy_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *racy;
+
+/**
+ *  Spoof likelihood. The likelihood that a modification
+ *  was made to the image's canonical version to make it appear
+ *  funny or offensive.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Likely It is likely that
+ *        the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Unknown Unknown likelihood.
+ *        (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *spoof;
+
+/**
+ *  Likelihood that this image contains violent content.
+ *
+ *  Likely values:
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Likely It is likely that
+ *        the image belongs to the specified vertical. (Value: "LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Possible It is possible
+ *        that the image belongs to the specified vertical. (Value: "POSSIBLE")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Unknown Unknown
+ *        likelihood. (Value: "UNKNOWN")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Unlikely It is unlikely
+ *        that the image belongs to the specified vertical. (Value: "UNLIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_VeryLikely It is very
+ *        likely that the image belongs to the specified vertical. (Value:
+ *        "VERY_LIKELY")
+ *    @arg @c kGTLRVision_SafeSearchAnnotation_Violence_VeryUnlikely It is very
+ *        unlikely that the image belongs to the specified vertical. (Value:
+ *        "VERY_UNLIKELY")
+ */
+@property(nonatomic, copy, nullable) NSString *violence;
+
+@end
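+
+// Illustrative sketch (not generated code): gating content on the likelihood
+// constants documented above. `annotation` is a hypothetical
+// GTLRVision_SafeSearchAnnotation.
+//
+//   BOOL flagged =
+//       [annotation.adult isEqualToString:kGTLRVision_SafeSearchAnnotation_Adult_Likely] ||
+//       [annotation.adult isEqualToString:kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely];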
+
+
+/**
+ *  The `Status` type defines a logical error model that is suitable for
+ *  different programming environments, including REST APIs and RPC APIs. It is
+ *  used by [gRPC](https://github.com/grpc). The error model is designed to be:
+ *  - Simple to use and understand for most users
+ *  - Flexible enough to meet unexpected needs
+ *  # Overview
+ *  The `Status` message contains three pieces of data: error code, error
+ *  message, and error details. The error code should be an enum value of
+ *  google.rpc.Code, but it may accept additional error codes if needed. The
+ *  error message should be a developer-facing English message that helps
+ *  developers *understand* and *resolve* the error. If a localized user-facing
+ *  error message is needed, put the localized message in the error details or
+ *  localize it in the client. The optional error details may contain arbitrary
+ *  information about the error. There is a predefined set of error detail types
+ *  in the package `google.rpc` that can be used for common error conditions.
+ *  # Language mapping
+ *  The `Status` message is the logical representation of the error model, but
+ *  it is not necessarily the actual wire format. When the `Status` message is
+ *  exposed in different client libraries and different wire protocols, it can
+ *  be mapped differently. For example, it will likely be mapped to some
+ *  exceptions in Java, but more likely mapped to some error codes in C.
+ *  # Other uses
+ *  The error model and the `Status` message can be used in a variety of
+ *  environments, either with or without APIs, to provide a
+ *  consistent developer experience across different environments.
+ *  Example uses of this error model include:
+ *  - Partial errors. If a service needs to return partial errors to the client,
+ *  it may embed the `Status` in the normal response to indicate the partial
+ *  errors.
+ *  - Workflow errors. A typical workflow has multiple steps. Each step may
+ *  have a `Status` message for error reporting.
+ *  - Batch operations. If a client uses batch request and batch response, the
+ *  `Status` message should be used directly inside batch response, one for
+ *  each error sub-response.
+ *  - Asynchronous operations. If an API call embeds asynchronous operation
+ *  results in its response, the status of those operations should be
+ *  represented directly using the `Status` message.
+ *  - Logging. If some API errors are stored in logs, the message `Status` could
+ *  be used directly after any stripping needed for security/privacy reasons.
+ */
+@interface GTLRVision_Status : GTLRObject
+
+/**
+ *  The status code, which should be an enum value of google.rpc.Code.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *code;
+
+/**
+ *  A list of messages that carry the error details. There is a common set of
+ *  message types for APIs to use.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Status_Details_Item *> *details;
+
+/**
+ *  A developer-facing error message, which should be in English. Any
+ *  user-facing error message should be localized and sent in the
+ *  google.rpc.Status.details field, or localized by the client.
+ */
+@property(nonatomic, copy, nullable) NSString *message;
+
+@end
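+
+// Illustrative sketch (not generated code): surfacing an error. `status` is a
+// hypothetical GTLRVision_Status; a code of 0 means google.rpc.Code OK.
+//
+//   if (status.code.intValue != 0) {
+//     NSLog(@"RPC failed (code %d): %@", status.code.intValue, status.message);
+//   }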
+
+
+/**
+ *  GTLRVision_Status_Details_Item
+ *
+ *  @note This class is documented as having more properties of any valid JSON
+ *        type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
+ *        get the list of properties and then fetch them; or @c
+ *        -additionalProperties to fetch them all at once.
+ */
+@interface GTLRVision_Status_Details_Item : GTLRObject
+@end
+
+
+/**
+ *  A single symbol representation.
+ */
+@interface GTLRVision_Symbol : GTLRObject
+
+/**
+ *  The bounding box for the symbol.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the symbol. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the symbol. */
+@property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
+
+/** The actual UTF-8 representation of the symbol. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
+
+
+/**
+ *  TextAnnotation contains a structured representation of OCR extracted text.
+ *  The hierarchy of an OCR extracted text structure is like this:
+ *  TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
+ *  Each structural component, starting from Page, may further have its own
+ *  properties. Properties describe detected languages, breaks etc. Please
+ *  refer to the TextAnnotation.TextProperty message definition below for
+ *  more detail.
+ */
+@interface GTLRVision_TextAnnotation : GTLRObject
+
+/** List of pages detected by OCR. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Page *> *pages;
+
+/** UTF-8 text detected on the pages. */
+@property(nonatomic, copy, nullable) NSString *text;
+
+@end
+
+
+/**
+ *  Additional information detected on the structural component.
+ */
+@interface GTLRVision_TextProperty : GTLRObject
+
+/** Detected start or end of a text segment. */
+@property(nonatomic, strong, nullable) GTLRVision_DetectedBreak *detectedBreak;
+
+/** A list of detected languages together with confidence. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_DetectedLanguage *> *detectedLanguages;
+
+@end
+
+
+/**
+ *  A vertex represents a 2D point in the image.
+ *  NOTE: the vertex coordinates are in the same scale as the original image.
+ */
+@interface GTLRVision_Vertex : GTLRObject
+
+/**
+ *  X coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *x;
+
+/**
+ *  Y coordinate.
+ *
+ *  Uses NSNumber of intValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *y;
+
+@end
+
+
+/**
+ *  Relevant information for the image from the Internet.
+ */
+@interface GTLRVision_WebDetection : GTLRObject
+
+/**
+ *  The service's best guess as to the topic of the request image.
+ *  Inferred from similar images on the open web.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebLabel *> *bestGuessLabels;
+
+/**
+ *  Fully matching images from the Internet.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *fullMatchingImages;
+
+/** Web pages containing the matching images from the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebPage *> *pagesWithMatchingImages;
+
+/**
+ *  Partial matching images from the Internet.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *partialMatchingImages;
+
+/** The visually similar image results. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *visuallySimilarImages;
+
+/** Deduced entities from similar images on the Internet. */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebEntity *> *webEntities;
+
+@end
+
+
+/**
+ *  Parameters for web detection request.
+ */
+@interface GTLRVision_WebDetectionParams : GTLRObject
+
+/**
+ *  Whether to include results derived from the geo information in the image.
+ *
+ *  Uses NSNumber of boolValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *includeGeoResults;
+
+@end
+
+
+/**
+ *  Entity deduced from similar images on the Internet.
+ */
+@interface GTLRVision_WebEntity : GTLRObject
+
+/**
+ *  Canonical description of the entity, in English.
+ *
+ *  Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
+ */
+@property(nonatomic, copy, nullable) NSString *descriptionProperty;
+
+/** Opaque entity ID. */
+@property(nonatomic, copy, nullable) NSString *entityId;
+
+/**
+ *  Overall relevancy score for the entity.
+ *  Not normalized and not comparable across different image queries.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+@end
+
+
+/**
+ *  Metadata for online images.
+ */
+@interface GTLRVision_WebImage : GTLRObject
+
+/**
+ *  (Deprecated) Overall relevancy score for the image.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result image URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  Label to provide extra metadata for the web detection.
+ */
+@interface GTLRVision_WebLabel : GTLRObject
+
+/** Label for extra metadata. */
+@property(nonatomic, copy, nullable) NSString *label;
+
+/**
+ *  The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
+ *  For more information, see
+ *  http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
+ */
+@property(nonatomic, copy, nullable) NSString *languageCode;
+
+@end
+
+
+/**
+ *  Metadata for web pages.
+ */
+@interface GTLRVision_WebPage : GTLRObject
+
+/**
+ *  Fully matching images on the page.
+ *  Can include resized copies of the query image.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *fullMatchingImages;
+
+/** Title for the web page; may contain HTML markup. */
+@property(nonatomic, copy, nullable) NSString *pageTitle;
+
+/**
+ *  Partial matching images on the page.
+ *  Those images are similar enough to share some key-point features. For
+ *  example, an original image will likely have partial matching for its
+ *  crops.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *partialMatchingImages;
+
+/**
+ *  (Deprecated) Overall relevancy score for the web page.
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *score;
+
+/** The result web page URL. */
+@property(nonatomic, copy, nullable) NSString *url;
+
+@end
+
+
+/**
+ *  A word representation.
+ */
+@interface GTLRVision_Word : GTLRObject
+
+/**
+ *  The bounding box for the word.
+ *  The vertices are in the order of top-left, top-right, bottom-right,
+ *  bottom-left. When a rotation of the bounding box is detected the rotation
+ *  is represented as around the top-left corner as defined when the text is
+ *  read in the 'natural' orientation.
+ *  For example:
+ *  * when the text is horizontal it might look like:
+ *  0----1
+ *  |    |
+ *  3----2
+ *  * when it's rotated 180 degrees around the top-left corner it becomes:
+ *  2----3
+ *  |    |
+ *  1----0
+ *  and the vertex order will still be (0, 1, 2, 3).
+ */
+@property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
+
+/**
+ *  Confidence of the OCR results for the word. Range [0, 1].
+ *
+ *  Uses NSNumber of floatValue.
+ */
+@property(nonatomic, strong, nullable) NSNumber *confidence;
+
+/** Additional information detected for the word. */
+@property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
+
+/**
+ *  List of symbols in the word.
+ *  The order of the symbols follows the natural reading order.
+ */
+@property(nonatomic, strong, nullable) NSArray<GTLRVision_Symbol *> *symbols;
+
+@end
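+
+// Example (editor's sketch): assembling a word's text from its symbols.
+// `word` is an assumed GTLRVision_Word; the `text` property on
+// GTLRVision_Symbol is taken from the model header earlier in this commit.
+//
+//   NSMutableString *text = [NSMutableString string];
+//   for (GTLRVision_Symbol *symbol in word.symbols) {
+//     [text appendString:symbol.text ?: @""];
+//   }
+//   NSLog(@"word '%@' (confidence %@)", text, word.confidence);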
+
+NS_ASSUME_NONNULL_END
+
+#pragma clang diagnostic pop

+ 1235 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionQuery.h

@@ -0,0 +1,1235 @@
+// NOTE: This file was generated by the ServiceGenerator.
+
+// ----------------------------------------------------------------------------
+// API:
+//   Cloud Vision API (vision/v1)
+// Description:
+//   Integrates Google Vision features, including image labeling, face, logo,
+//   and landmark detection, optical character recognition (OCR), and detection
+//   of explicit content, into applications.
+// Documentation:
+//   https://cloud.google.com/vision/
+
+#if GTLR_BUILT_AS_FRAMEWORK
+  #import "GTLR/GTLRQuery.h"
+#else
+  #import "GTLRQuery.h"
+#endif
+
+#if GTLR_RUNTIME_VERSION != 3000
+#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
+#endif
+
+@class GTLRVision_AddProductToProductSetRequest;
+@class GTLRVision_AsyncBatchAnnotateFilesRequest;
+@class GTLRVision_BatchAnnotateImagesRequest;
+@class GTLRVision_CancelOperationRequest;
+@class GTLRVision_ImportProductSetsRequest;
+@class GTLRVision_Product;
+@class GTLRVision_ProductSet;
+@class GTLRVision_ReferenceImage;
+@class GTLRVision_RemoveProductFromProductSetRequest;
+
+// Generated comments include content from the discovery document; avoid them
+// causing warnings since clang's checks are somewhat arbitrary.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdocumentation"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  Parent class for other Vision query classes.
+ */
+@interface GTLRVisionQuery : GTLRQuery
+
+/** Selector specifying which fields to include in a partial response. */
+@property(nonatomic, copy, nullable) NSString *fields;
+
+@end
+
+/**
+ *  Run asynchronous image detection and annotation for a list of generic
+ *  files, such as PDF files, which may contain multiple pages and multiple
+ *  images per page. Progress and results can be retrieved through the
+ *  `google.longrunning.Operations` interface.
+ *  `Operation.metadata` contains `OperationMetadata` (metadata).
+ *  `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results).
+ *
+ *  Method: vision.files.asyncBatchAnnotate
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_FilesAsyncBatchAnnotate : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForFilesAsyncBatchAnnotateWithObject:]
+
+/**
+ *  Fetches a @c GTLRVision_Operation.
+ *
+ *  Run asynchronous image detection and annotation for a list of generic
+ *  files, such as PDF files, which may contain multiple pages and multiple
+ *  images per page. Progress and results can be retrieved through the
+ *  `google.longrunning.Operations` interface.
+ *  `Operation.metadata` contains `OperationMetadata` (metadata).
+ *  `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results).
+ *
+ *  @param object The @c GTLRVision_AsyncBatchAnnotateFilesRequest to include in
+ *    the query.
+ *
+ *  @return GTLRVisionQuery_FilesAsyncBatchAnnotate
+ */
++ (instancetype)queryWithObject:(GTLRVision_AsyncBatchAnnotateFilesRequest *)object;
+
+@end
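+
+// Example (editor's sketch): building the async file annotation query.
+// Populating `request.requests` is left to the caller; the property name is
+// taken from the model header earlier in this commit.
+//
+//   GTLRVision_AsyncBatchAnnotateFilesRequest *request =
+//       [GTLRVision_AsyncBatchAnnotateFilesRequest object];
+//   // ... populate request.requests ...
+//   GTLRVisionQuery_FilesAsyncBatchAnnotate *query =
+//       [GTLRVisionQuery_FilesAsyncBatchAnnotate queryWithObject:request];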
+
+/**
+ *  Run image detection and annotation for a batch of images.
+ *
+ *  Method: vision.images.annotate
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ImagesAnnotate : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForImagesAnnotateWithObject:]
+
+/**
+ *  Fetches a @c GTLRVision_BatchAnnotateImagesResponse.
+ *
+ *  Run image detection and annotation for a batch of images.
+ *
+ *  @param object The @c GTLRVision_BatchAnnotateImagesRequest to include in the
+ *    query.
+ *
+ *  @return GTLRVisionQuery_ImagesAnnotate
+ */
++ (instancetype)queryWithObject:(GTLRVision_BatchAnnotateImagesRequest *)object;
+
+@end
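+
+// Example (editor's sketch): executing the batch annotate query against an
+// assumed, already configured GTLRVisionService named `service`; `request`
+// is a populated GTLRVision_BatchAnnotateImagesRequest, whose `responses`
+// result property is taken from the model header earlier in this commit.
+//
+//   GTLRVisionQuery_ImagesAnnotate *query =
+//       [GTLRVisionQuery_ImagesAnnotate queryWithObject:request];
+//   [service executeQuery:query
+//       completionHandler:^(GTLRServiceTicket *ticket,
+//                           GTLRVision_BatchAnnotateImagesResponse *response,
+//                           NSError *error) {
+//     if (error) { NSLog(@"annotate failed: %@", error); return; }
+//     NSLog(@"got %lu responses", (unsigned long)response.responses.count);
+//   }];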
+
+/**
+ *  Gets the latest state of a long-running operation. Clients can use this
+ *  method to poll the operation result at intervals as recommended by the API
+ *  service.
+ *
+ *  Method: vision.locations.operations.get
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_LocationsOperationsGet : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForLocationsOperationsGetWithname:]
+
+/** The name of the operation resource. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Operation.
+ *
+ *  Gets the latest state of a long-running operation. Clients can use this
+ *  method to poll the operation result at intervals as recommended by the API
+ *  service.
+ *
+ *  @param name The name of the operation resource.
+ *
+ *  @return GTLRVisionQuery_LocationsOperationsGet
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Starts asynchronous cancellation on a long-running operation. The server
+ *  makes a best effort to cancel the operation, but success is not
+ *  guaranteed. If the server doesn't support this method, it returns
+ *  `google.rpc.Code.UNIMPLEMENTED`. Clients can use
+ *  Operations.GetOperation or
+ *  other methods to check whether the cancellation succeeded or whether the
+ *  operation completed despite cancellation. On successful cancellation,
+ *  the operation is not deleted; instead, it becomes an operation with
+ *  an Operation.error value with a google.rpc.Status.code of 1,
+ *  corresponding to `Code.CANCELLED`.
+ *
+ *  Method: vision.operations.cancel
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_OperationsCancel : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForOperationsCancelWithObject:name:]
+
+/** The name of the operation resource to be cancelled. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Starts asynchronous cancellation on a long-running operation. The server
+ *  makes a best effort to cancel the operation, but success is not
+ *  guaranteed. If the server doesn't support this method, it returns
+ *  `google.rpc.Code.UNIMPLEMENTED`. Clients can use
+ *  Operations.GetOperation or
+ *  other methods to check whether the cancellation succeeded or whether the
+ *  operation completed despite cancellation. On successful cancellation,
+ *  the operation is not deleted; instead, it becomes an operation with
+ *  an Operation.error value with a google.rpc.Status.code of 1,
+ *  corresponding to `Code.CANCELLED`.
+ *
+ *  @param object The @c GTLRVision_CancelOperationRequest to include in the
+ *    query.
+ *  @param name The name of the operation resource to be cancelled.
+ *
+ *  @return GTLRVisionQuery_OperationsCancel
+ */
++ (instancetype)queryWithObject:(GTLRVision_CancelOperationRequest *)object
+                           name:(NSString *)name;
+
+@end
+
+/**
+ *  Deletes a long-running operation. This method indicates that the client is
+ *  no longer interested in the operation result. It does not cancel the
+ *  operation. If the server doesn't support this method, it returns
+ *  `google.rpc.Code.UNIMPLEMENTED`.
+ *
+ *  Method: vision.operations.delete
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_OperationsDelete : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForOperationsDeleteWithname:]
+
+/** The name of the operation resource to be deleted. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Deletes a long-running operation. This method indicates that the client is
+ *  no longer interested in the operation result. It does not cancel the
+ *  operation. If the server doesn't support this method, it returns
+ *  `google.rpc.Code.UNIMPLEMENTED`.
+ *
+ *  @param name The name of the operation resource to be deleted.
+ *
+ *  @return GTLRVisionQuery_OperationsDelete
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Gets the latest state of a long-running operation. Clients can use this
+ *  method to poll the operation result at intervals as recommended by the API
+ *  service.
+ *
+ *  Method: vision.operations.get
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_OperationsGet : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForOperationsGetWithname:]
+
+/** The name of the operation resource. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Operation.
+ *
+ *  Gets the latest state of a long-running operation. Clients can use this
+ *  method to poll the operation result at intervals as recommended by the API
+ *  service.
+ *
+ *  @param name The name of the operation resource.
+ *
+ *  @return GTLRVisionQuery_OperationsGet
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Lists operations that match the specified filter in the request. If the
+ *  server doesn't support this method, it returns `UNIMPLEMENTED`.
+ *  NOTE: the `name` binding allows API services to override the binding
+ *  to use different resource name schemes, such as `users/ * /operations`. To
+ *  override the binding, API services can add a binding such as
+ *  `"/v1/{name=users/ *}/operations"` to their service configuration.
+ *  For backwards compatibility, the default name includes the operations
+ *  collection id; however, overriding users must ensure the name binding
+ *  is the parent resource, without the operations collection id.
+ *
+ *  Method: vision.operations.list
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_OperationsList : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForOperationsListWithname:]
+
+/** The standard list filter. */
+@property(nonatomic, copy, nullable) NSString *filter;
+
+/** The name of the operation's parent resource. */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/** The standard list page size. */
+@property(nonatomic, assign) NSInteger pageSize;
+
+/** The standard list page token. */
+@property(nonatomic, copy, nullable) NSString *pageToken;
+
+/**
+ *  Fetches a @c GTLRVision_ListOperationsResponse.
+ *
+ *  Lists operations that match the specified filter in the request. If the
+ *  server doesn't support this method, it returns `UNIMPLEMENTED`.
+ *  NOTE: the `name` binding allows API services to override the binding
+ *  to use different resource name schemes, such as `users/ * /operations`. To
+ *  override the binding, API services can add a binding such as
+ *  `"/v1/{name=users/ *}/operations"` to their service configuration.
+ *  For backwards compatibility, the default name includes the operations
+ *  collection id; however, overriding users must ensure the name binding
+ *  is the parent resource, without the operations collection id.
+ *
+ *  @param name The name of the operation's parent resource.
+ *
+ *  @return GTLRVisionQuery_OperationsList
+ *
+ *  @note Automatic pagination will be done when @c shouldFetchNextPages is
+ *        enabled. See @c shouldFetchNextPages on @c GTLRService for more
+ *        information.
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
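+
+// Example (editor's sketch): listing operations with automatic pagination on
+// an assumed GTLRVisionService named `service`. The parent name value below
+// is an assumption; substitute the resource appropriate for your project.
+//
+//   service.shouldFetchNextPages = YES;
+//   GTLRVisionQuery_OperationsList *query =
+//       [GTLRVisionQuery_OperationsList queryWithName:@"operations"];
+//   query.pageSize = 100;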
+
+/**
+ *  Creates and returns a new product resource.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096
+ *  characters.
+ *  * Returns INVALID_ARGUMENT if description is longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if product_category is missing or invalid.
+ *
+ *  Method: vision.projects.locations.products.create
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsCreate : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsCreateWithObject:parent:]
+
+/**
+ *  The project in which the Product should be created.
+ *  Format is
+ *  `projects/PROJECT_ID/locations/LOC_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  A user-supplied resource id for this Product. If set, the server will
+ *  attempt to use this value as the resource id. If it is already in use, an
+ *  error is returned with code ALREADY_EXISTS. Must be at most 128 characters
+ *  long. It cannot contain the character `/`.
+ */
+@property(nonatomic, copy, nullable) NSString *productId;
+
+/**
+ *  Fetches a @c GTLRVision_Product.
+ *
+ *  Creates and returns a new product resource.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096
+ *  characters.
+ *  * Returns INVALID_ARGUMENT if description is longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if product_category is missing or invalid.
+ *
+ *  @param object The @c GTLRVision_Product to include in the query.
+ *  @param parent The project in which the Product should be created.
+ *    Format is
+ *    `projects/PROJECT_ID/locations/LOC_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsCreate
+ */
++ (instancetype)queryWithObject:(GTLRVision_Product *)object
+                         parent:(NSString *)parent;
+
+@end
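+
+// Example (editor's sketch): creating a product with a caller-chosen id. The
+// displayName/productCategory properties come from the GTLRVision_Product
+// model header earlier in this commit; placeholder values are assumptions.
+//
+//   GTLRVision_Product *product = [GTLRVision_Product object];
+//   product.displayName = @"sample-shoe";
+//   product.productCategory = @"apparel";
+//   GTLRVisionQuery_ProjectsLocationsProductsCreate *query =
+//       [GTLRVisionQuery_ProjectsLocationsProductsCreate
+//           queryWithObject:product
+//                    parent:@"projects/PROJECT_ID/locations/LOC_ID"];
+//   query.productId = @"shoe-001";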
+
+/**
+ *  Permanently deletes a product and its reference images.
+ *  Metadata of the product and all its images will be deleted right away, but
+ *  search queries against ProductSets containing the product may still work
+ *  until all related caches are refreshed.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the product does not exist.
+ *
+ *  Method: vision.projects.locations.products.delete
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsDelete : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsDeleteWithname:]
+
+/**
+ *  Resource name of product to delete.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Permanently deletes a product and its reference images.
+ *  Metadata of the product and all its images will be deleted right away, but
+ *  search queries against ProductSets containing the product may still work
+ *  until all related caches are refreshed.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the product does not exist.
+ *
+ *  @param name Resource name of product to delete.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsDelete
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Adds a Product to the specified ProductSet. If the Product is already
+ *  present, no change is made.
+ *  One Product can be added to at most 100 ProductSets.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product or the ProductSet doesn't exist.
+ *
+ *  Method: vision.projects.locations.productSets.addProduct
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsAddProduct : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsAddProductWithObject:name:]
+
+/**
+ *  The resource name for the ProductSet to modify.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Adds a Product to the specified ProductSet. If the Product is already
+ *  present, no change is made.
+ *  One Product can be added to at most 100 ProductSets.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product or the ProductSet doesn't exist.
+ *
+ *  @param object The @c GTLRVision_AddProductToProductSetRequest to include in
+ *    the query.
+ *  @param name The resource name for the ProductSet to modify.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsAddProduct
+ */
++ (instancetype)queryWithObject:(GTLRVision_AddProductToProductSetRequest *)object
+                           name:(NSString *)name;
+
+@end
+
+/**
+ *  Creates and returns a new ProductSet resource.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if display_name is missing, or is longer than
+ *  4096 characters.
+ *
+ *  Method: vision.projects.locations.productSets.create
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsCreate : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsCreateWithObject:parent:]
+
+/**
+ *  The project in which the ProductSet should be created.
+ *  Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  A user-supplied resource id for this ProductSet. If set, the server will
+ *  attempt to use this value as the resource id. If it is already in use, an
+ *  error is returned with code ALREADY_EXISTS. Must be at most 128 characters
+ *  long. It cannot contain the character `/`.
+ */
+@property(nonatomic, copy, nullable) NSString *productSetId;
+
+/**
+ *  Fetches a @c GTLRVision_ProductSet.
+ *
+ *  Creates and returns a new ProductSet resource.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if display_name is missing, or is longer than
+ *  4096 characters.
+ *
+ *  @param object The @c GTLRVision_ProductSet to include in the query.
+ *  @param parent The project in which the ProductSet should be created.
+ *    Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsCreate
+ */
++ (instancetype)queryWithObject:(GTLRVision_ProductSet *)object
+                         parent:(NSString *)parent;
+
+@end
+
+/**
+ *  Permanently deletes a ProductSet. Products and ReferenceImages in the
+ *  ProductSet are not deleted.
+ *  The actual image files are not deleted from Google Cloud Storage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *
+ *  Method: vision.projects.locations.productSets.delete
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsDelete : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsDeleteWithname:]
+
+/**
+ *  Resource name of the ProductSet to delete.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Permanently deletes a ProductSet. Products and ReferenceImages in the
+ *  ProductSet are not deleted.
+ *  The actual image files are not deleted from Google Cloud Storage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *
+ *  @param name Resource name of the ProductSet to delete.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsDelete
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Gets information associated with a ProductSet.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *
+ *  Method: vision.projects.locations.productSets.get
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsGet : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsGetWithname:]
+
+/**
+ *  Resource name of the ProductSet to get.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_ProductSet.
+ *
+ *  Gets information associated with a ProductSet.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *
+ *  @param name Resource name of the ProductSet to get.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsGet
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Asynchronous API that imports a list of reference images to specified
+ *  product sets based on a list of image information.
+ *  The google.longrunning.Operation API can be used to keep track of the
+ *  progress and results of the request.
+ *  `Operation.metadata` contains `BatchOperationMetadata`. (progress)
+ *  `Operation.response` contains `ImportProductSetsResponse`. (results)
+ *  The input source of this method is a CSV file on Google Cloud Storage.
+ *  For the format of the CSV file, please see
+ *  ImportProductSetsGcsSource.csv_file_uri.
+ *
+ *  Method: vision.projects.locations.productSets.import
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsImport : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsImportWithObject:parent:]
+
+/**
+ *  The project in which the ProductSets should be imported.
+ *  Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  Fetches a @c GTLRVision_Operation.
+ *
+ *  Asynchronous API that imports a list of reference images to specified
+ *  product sets based on a list of image information.
+ *  The google.longrunning.Operation API can be used to keep track of the
+ *  progress and results of the request.
+ *  `Operation.metadata` contains `BatchOperationMetadata`. (progress)
+ *  `Operation.response` contains `ImportProductSetsResponse`. (results)
+ *  The input source of this method is a CSV file on Google Cloud Storage.
+ *  For the format of the CSV file, please see
+ *  ImportProductSetsGcsSource.csv_file_uri.
+ *
+ *  @param object The @c GTLRVision_ImportProductSetsRequest to include in the
+ *    query.
+ *  @param parent The project in which the ProductSets should be imported.
+ *    Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsImport
+ */
++ (instancetype)queryWithObject:(GTLRVision_ImportProductSetsRequest *)object
+                         parent:(NSString *)parent;
+
+@end
+
+/**
+ *  Lists ProductSets in an unspecified order.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100, or less
+ *  than 1.
+ *
+ *  Method: vision.projects.locations.productSets.list
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsList : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsListWithparent:]
+
+/** The maximum number of items to return. Default 10, maximum 100. */
+@property(nonatomic, assign) NSInteger pageSize;
+
+/** The next_page_token returned from a previous List request, if any. */
+@property(nonatomic, copy, nullable) NSString *pageToken;
+
+/**
+ *  The project from which ProductSets should be listed.
+ *  Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  Fetches a @c GTLRVision_ListProductSetsResponse.
+ *
+ *  Lists ProductSets in an unspecified order.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100, or less
+ *  than 1.
+ *
+ *  @param parent The project from which ProductSets should be listed.
+ *    Format is `projects/PROJECT_ID/locations/LOC_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsList
+ *
+ *  @note Automatic pagination will be done when @c shouldFetchNextPages is
+ *        enabled. See @c shouldFetchNextPages on @c GTLRService for more
+ *        information.
+ */
++ (instancetype)queryWithParent:(NSString *)parent;
+
+@end
+
+/**
+ *  Makes changes to a ProductSet resource.
+ *  Only display_name can be updated currently.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *  * Returns INVALID_ARGUMENT if display_name is present in update_mask but
+ *  missing from the request or longer than 4096 characters.
+ *
+ *  Method: vision.projects.locations.productSets.patch
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsPatch : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsPatchWithObject:name:]
+
+/**
+ *  The resource name of the ProductSet.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
+ *  This field is ignored when creating a ProductSet.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The FieldMask that specifies which fields to
+ *  update.
+ *  If update_mask isn't specified, all mutable fields are to be updated.
+ *  Valid mask path is `display_name`.
+ *
+ *  String format is a comma-separated list of fields.
+ */
+@property(nonatomic, copy, nullable) NSString *updateMask;
+
+/**
+ *  Fetches a @c GTLRVision_ProductSet.
+ *
+ *  Makes changes to a ProductSet resource.
+ *  Only display_name can be updated currently.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the ProductSet does not exist.
+ *  * Returns INVALID_ARGUMENT if display_name is present in update_mask but
+ *  missing from the request or longer than 4096 characters.
+ *
+ *  @param object The @c GTLRVision_ProductSet to include in the query.
+ *  @param name The resource name of the ProductSet.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
+ *    This field is ignored when creating a ProductSet.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsPatch
+ */
++ (instancetype)queryWithObject:(GTLRVision_ProductSet *)object
+                           name:(NSString *)name;
+
+@end
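+
+// Example (editor's sketch): renaming a ProductSet. Per the comment above,
+// `display_name` is the only valid mask path; the name string keeps the
+// placeholder ids used throughout this header.
+//
+//   GTLRVision_ProductSet *patch = [GTLRVision_ProductSet object];
+//   patch.displayName = @"winter-catalog";
+//   GTLRVisionQuery_ProjectsLocationsProductSetsPatch *query =
+//       [GTLRVisionQuery_ProjectsLocationsProductSetsPatch
+//           queryWithObject:patch
+//                      name:@"projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID"];
+//   query.updateMask = @"display_name";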
+
+/**
+ *  Lists the Products in a ProductSet, in an unspecified order. If the
+ *  ProductSet does not exist, the products field of the response will be
+ *  empty.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1.
+ *
+ *  Method: vision.projects.locations.productSets.products.list
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsProductsList : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsProductsListWithname:]
+
+/**
+ *  The ProductSet resource for which to retrieve Products.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/** The maximum number of items to return. Default 10, maximum 100. */
+@property(nonatomic, assign) NSInteger pageSize;
+
+/** The next_page_token returned from a previous List request, if any. */
+@property(nonatomic, copy, nullable) NSString *pageToken;
+
+/**
+ *  Fetches a @c GTLRVision_ListProductsInProductSetResponse.
+ *
+ *  Lists the Products in a ProductSet, in an unspecified order. If the
+ *  ProductSet does not exist, the products field of the response will be
+ *  empty.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1.
+ *
+ *  @param name The ProductSet resource for which to retrieve Products.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsProductsList
+ *
+ *  @note Automatic pagination will be done when @c shouldFetchNextPages is
+ *        enabled. See @c shouldFetchNextPages on @c GTLRService for more
+ *        information.
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Removes a Product from the specified ProductSet.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product is not found under the ProductSet.
+ *
+ *  Method: vision.projects.locations.productSets.removeProduct
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductSetsRemoveProduct : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductSetsRemoveProductWithObject:name:]
+
+/**
+ *  The resource name for the ProductSet to modify.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Removes a Product from the specified ProductSet.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product is not found under the ProductSet.
+ *
+ *  @param object The @c GTLRVision_RemoveProductFromProductSetRequest to
+ *    include in the query.
+ *  @param name The resource name for the ProductSet to modify.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductSetsRemoveProduct
+ */
++ (instancetype)queryWithObject:(GTLRVision_RemoveProductFromProductSetRequest *)object
+                           name:(NSString *)name;
+
+@end
+
+/**
+ *  Gets information associated with a Product.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product does not exist.
+ *
+ *  Method: vision.projects.locations.products.get
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsGet : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsGetWithname:]
+
+/**
+ *  Resource name of the Product to get.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Product.
+ *
+ *  Gets information associated with a Product.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product does not exist.
+ *
+ *  @param name Resource name of the Product to get.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsGet
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Lists products in an unspecified order.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1.
+ *
+ *  Method: vision.projects.locations.products.list
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsList : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsListWithparent:]
+
+/** The maximum number of items to return. Default 10, maximum 100. */
+@property(nonatomic, assign) NSInteger pageSize;
+
+/** The next_page_token returned from a previous List request, if any. */
+@property(nonatomic, copy, nullable) NSString *pageToken;
+
+/**
+ *  The project OR ProductSet from which Products should be listed.
+ *  Format:
+ *  `projects/PROJECT_ID/locations/LOC_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  Fetches a @c GTLRVision_ListProductsResponse.
+ *
+ *  Lists products in an unspecified order.
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1.
+ *
+ *  @param parent The project OR ProductSet from which Products should be
+ *    listed.
+ *    Format:
+ *    `projects/PROJECT_ID/locations/LOC_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsList
+ *
+ *  @note Automatic pagination will be done when @c shouldFetchNextPages is
+ *        enabled. See @c shouldFetchNextPages on @c GTLRService for more
+ *        information.
+ */
++ (instancetype)queryWithParent:(NSString *)parent;
+
+@end
+
+/**
+ *  Makes changes to a Product resource.
+ *  Only the `display_name`, `description`, and `labels` fields can be updated
+ *  right now.
+ *  If labels are updated, the change will not be reflected in queries until
+ *  the next index time.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product does not exist.
+ *  * Returns INVALID_ARGUMENT if display_name is present in update_mask but is
+ *  missing from the request or longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if description is present in update_mask but is
+ *  longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if product_category is present in update_mask.
+ *
+ *  Method: vision.projects.locations.products.patch
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsPatch : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsPatchWithObject:name:]
+
+/**
+ *  The resource name of the product.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *  This field is ignored when creating a product.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  The FieldMask that specifies which fields
+ *  to update.
+ *  If update_mask isn't specified, all mutable fields are to be updated.
+ *  Valid mask paths include `product_labels`, `display_name`, and
+ *  `description`.
+ *
+ *  String format is a comma-separated list of fields.
+ */
+@property(nonatomic, copy, nullable) NSString *updateMask;
+
+/**
+ *  Fetches a @c GTLRVision_Product.
+ *
+ *  Makes changes to a Product resource.
+ *  Only the `display_name`, `description`, and `labels` fields can be updated
+ *  right now.
+ *  If labels are updated, the change will not be reflected in queries until
+ *  the next index time.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the Product does not exist.
+ *  * Returns INVALID_ARGUMENT if display_name is present in update_mask but is
+ *  missing from the request or longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if description is present in update_mask but is
+ *  longer than 4096 characters.
+ *  * Returns INVALID_ARGUMENT if product_category is present in update_mask.
+ *
+ *  @param object The @c GTLRVision_Product to include in the query.
+ *  @param name The resource name of the product.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *    This field is ignored when creating a product.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsPatch
+ */
++ (instancetype)queryWithObject:(GTLRVision_Product *)object
+                           name:(NSString *)name;
+
+@end
+
+/**
+ *  Creates and returns a new ReferenceImage resource.
+ *  The `bounding_poly` field is optional. If `bounding_poly` is not specified,
+ *  the system will try to detect regions of interest in the image that are
+ *  compatible with the product_category on the parent product. If it is
+ *  specified, detection is ALWAYS skipped. The system converts polygons into
+ *  non-rotated rectangles.
+ *  Note that the pipeline will resize the image if the image resolution is too
+ *  large to process (above 50MP).
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096
+ *  characters.
+ *  * Returns INVALID_ARGUMENT if the product does not exist.
+ *  * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing
+ *  compatible with the parent product's product_category is detected.
+ *  * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons.
+ *
+ *  Method: vision.projects.locations.products.referenceImages.create
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesCreate : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsReferenceImagesCreateWithObject:parent:]
+
+/**
+ *  Resource name of the product in which to create the reference image.
+ *  Format is
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  A user-supplied resource id for the ReferenceImage to be added. If set,
+ *  the server will attempt to use this value as the resource id. If it is
+ *  already in use, an error is returned with code ALREADY_EXISTS. Must be at
+ *  most 128 characters long. It cannot contain the character `/`.
+ */
+@property(nonatomic, copy, nullable) NSString *referenceImageId;
+
+/**
+ *  Fetches a @c GTLRVision_ReferenceImage.
+ *
+ *  Creates and returns a new ReferenceImage resource.
+ *  The `bounding_poly` field is optional. If `bounding_poly` is not specified,
+ *  the system will try to detect regions of interest in the image that are
+ *  compatible with the product_category on the parent product. If it is
+ *  specified, detection is ALWAYS skipped. The system converts polygons into
+ *  non-rotated rectangles.
+ *  Note that the pipeline will resize the image if the image resolution is too
+ *  large to process (above 50MP).
+ *  Possible errors:
+ *  * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096
+ *  characters.
+ *  * Returns INVALID_ARGUMENT if the product does not exist.
+ *  * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing
+ *  compatible with the parent product's product_category is detected.
+ *  * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons.
+ *
+ *  @param object The @c GTLRVision_ReferenceImage to include in the query.
+ *  @param parent Resource name of the product in which to create the reference
+ *    image.
+ *    Format is
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesCreate
+ */
++ (instancetype)queryWithObject:(GTLRVision_ReferenceImage *)object
+                         parent:(NSString *)parent;
+
+@end
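+
+// Example (editor's sketch): registering a reference image by its Google
+// Cloud Storage URI. The `uri` property is taken from the
+// GTLRVision_ReferenceImage model header earlier in this commit.
+//
+//   GTLRVision_ReferenceImage *image = [GTLRVision_ReferenceImage object];
+//   image.uri = @"gs://BUCKET/shoe-front.jpg";
+//   GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesCreate *query =
+//       [GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesCreate
+//           queryWithObject:image
+//                    parent:@"projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID"];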
+
+/**
+ *  Permanently deletes a reference image.
+ *  The image metadata will be deleted right away, but search queries
+ *  against ProductSets containing the image may still work until all related
+ *  caches are refreshed.
+ *  The actual image files are not deleted from Google Cloud Storage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the reference image does not exist.
+ *
+ *  Method: vision.projects.locations.products.referenceImages.delete
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesDelete : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsReferenceImagesDeleteWithname:]
+
+/**
+ *  The resource name of the reference image to delete.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_Empty.
+ *
+ *  Permanently deletes a reference image.
+ *  The image metadata will be deleted right away, but search queries
+ *  against ProductSets containing the image may still work until all related
+ *  caches are refreshed.
+ *  The actual image files are not deleted from Google Cloud Storage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the reference image does not exist.
+ *
+ *  @param name The resource name of the reference image to delete.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesDelete
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Gets information associated with a ReferenceImage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the specified image does not exist.
+ *
+ *  Method: vision.projects.locations.products.referenceImages.get
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesGet : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsReferenceImagesGetWithname:]
+
+/**
+ *  The resource name of the ReferenceImage to get.
+ *  Format is:
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *name;
+
+/**
+ *  Fetches a @c GTLRVision_ReferenceImage.
+ *
+ *  Gets information associated with a ReferenceImage.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the specified image does not exist.
+ *
+ *  @param name The resource name of the ReferenceImage to get.
+ *    Format is:
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesGet
+ */
++ (instancetype)queryWithName:(NSString *)name;
+
+@end
+
+/**
+ *  Lists reference images.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the parent product does not exist.
+ *  * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less
+ *  than 1.
+ *
+ *  Method: vision.projects.locations.products.referenceImages.list
+ *
+ *  Authorization scope(s):
+ *    @c kGTLRAuthScopeVisionCloudPlatform
+ *    @c kGTLRAuthScopeVisionCloudVision
+ */
+@interface GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesList : GTLRVisionQuery
+// Previous library name was
+//   +[GTLQueryVision queryForProjectsLocationsProductsReferenceImagesListWithparent:]
+
+/** The maximum number of items to return. Default 10, maximum 100. */
+@property(nonatomic, assign) NSInteger pageSize;
+
+/**
+ *  A token identifying a page of results to be returned. This is the value
+ *  of `nextPageToken` returned in a previous reference image list request.
+ *  Defaults to the first page if not specified.
+ */
+@property(nonatomic, copy, nullable) NSString *pageToken;
+
+/**
+ *  Resource name of the product containing the reference images.
+ *  Format is
+ *  `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ */
+@property(nonatomic, copy, nullable) NSString *parent;
+
+/**
+ *  Fetches a @c GTLRVision_ListReferenceImagesResponse.
+ *
+ *  Lists reference images.
+ *  Possible errors:
+ *  * Returns NOT_FOUND if the parent product does not exist.
+ *  * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less
+ *  than 1.
+ *
+ *  @param parent Resource name of the product containing the reference images.
+ *    Format is
+ *    `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
+ *
+ *  @return GTLRVisionQuery_ProjectsLocationsProductsReferenceImagesList
+ *
+ *  @note Automatic pagination will be done when @c shouldFetchNextPages is
+ *        enabled. See @c shouldFetchNextPages on @c GTLRService for more
+ *        information.
+ */
++ (instancetype)queryWithParent:(NSString *)parent;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#pragma clang diagnostic pop

+ 88 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Headers/GTLRVisionService.h

@@ -0,0 +1,88 @@
+// NOTE: This file was generated by the ServiceGenerator.
+
+// ----------------------------------------------------------------------------
+// API:
+//   Cloud Vision API (vision/v1)
+// Description:
+//   Integrates Google Vision features, including image labeling, face, logo,
+//   and landmark detection, optical character recognition (OCR), and detection
+//   of explicit content, into applications.
+// Documentation:
+//   https://cloud.google.com/vision/
+
+#if GTLR_BUILT_AS_FRAMEWORK
+  #import "GTLR/GTLRService.h"
+#else
+  #import "GTLRService.h"
+#endif
+
+#if GTLR_RUNTIME_VERSION != 3000
+#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
+#endif
+
+// Generated comments include content from the discovery document; avoid them
+// causing warnings since clang's checks are somewhat arbitrary.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdocumentation"
+
+NS_ASSUME_NONNULL_BEGIN
+
+// ----------------------------------------------------------------------------
+// Authorization scopes
+
+/**
+ *  Authorization scope: View and manage your data across Google Cloud Platform
+ *  services
+ *
+ *  Value "https://www.googleapis.com/auth/cloud-platform"
+ */
+GTLR_EXTERN NSString * const kGTLRAuthScopeVisionCloudPlatform;
+/**
+ *  Authorization scope: Apply machine learning models to understand and label
+ *  images
+ *
+ *  Value "https://www.googleapis.com/auth/cloud-vision"
+ */
+GTLR_EXTERN NSString * const kGTLRAuthScopeVisionCloudVision;
+
+// ----------------------------------------------------------------------------
+//   GTLRVisionService
+//
+
+/**
+ *  Service for executing Cloud Vision API queries.
+ *
+ *  Integrates Google Vision features, including image labeling, face, logo, and
+ *  landmark detection, optical character recognition (OCR), and detection of
+ *  explicit content, into applications.
+ */
+@interface GTLRVisionService : GTLRService
+
+// No new methods
+
+// Clients should create a standard query with any of the class methods in
+// GTLRVisionQuery.h. The query can then be sent with GTLRService's execute
+// methods,
+//
+//   - (GTLRServiceTicket *)executeQuery:(GTLRQuery *)query
+//                     completionHandler:(void (^)(GTLRServiceTicket *ticket,
+//                                                 id object, NSError *error))handler;
+// or
+//   - (GTLRServiceTicket *)executeQuery:(GTLRQuery *)query
+//                              delegate:(id)delegate
+//                     didFinishSelector:(SEL)finishedSelector;
+//
+// where finishedSelector has a signature of:
+//
+//   - (void)serviceTicket:(GTLRServiceTicket *)ticket
+//      finishedWithObject:(id)object
+//                   error:(NSError *)error;
+//
+// The object passed to the completion handler or delegate method
+// is a subclass of GTLRObject, determined by the query method executed.
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#pragma clang diagnostic pop
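+
+// Example (editor's sketch): configuring the service before issuing queries.
+// `myAuthorizer` is an assumed OAuth 2 authorizer granting one of the scopes
+// declared above.
+//
+//   GTLRVisionService *service = [[GTLRVisionService alloc] init];
+//   service.authorizer = myAuthorizer;
+//   service.shouldFetchNextPages = YES;  // optional: follow list page tokens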

+ 5 - 0
Libraries external/Firebase/MLVision/GoogleAPIClientForREST.framework/Modules/module.modulemap

@@ -0,0 +1,5 @@
+framework module GoogleAPIClientForREST {
+  umbrella header "GoogleAPIClientForREST.h"
+  export *
+  module * { export * }
+}

BIN
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/GoogleMobileVision


+ 42 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVDetector.h

@@ -0,0 +1,42 @@
+#import <AVFoundation/AVFoundation.h>
+
+#import "GMVFeature.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ *  A GMVDetector object uses image processing to search for and identify notable features in a
+ *  still image or video. Detected features are represented by GMVFeature objects that provide more
+ *  information about each feature.
+ */
+@interface GMVDetector : NSObject
+
+/**
+ *  Creates and returns a configured detector.
+ *  @param type Identifies which type of detector should be created.
+ *  @param options Configuration options for the detector.
+ *  @return The |type| detector. The detector may be nil if initialization failed.
+ */
++ (nullable GMVDetector *)detectorOfType:(NSString *)type options:(nullable NSDictionary *)options;
+
+/**
+ *  Searches for features in an image.
+ *  @param image The image to search for features.
+ *  @param options Configuration options for runtime detection. Use the GMVDetectorImageOrientation
+ *  key to specify the image orientation.
+ *  @return The array of detected features, or nil if an error occurred.
+ */
+- (nullable NSArray<__kindof GMVFeature *> *)featuresInImage:(UIImage *)image
+                                                     options:(nullable NSDictionary *)options;
+
+/**
+ *  Searches for features in an image buffer.
+ *  @param sampleBuffer The image buffer to search for features.
+ *  @param options Configuration options for runtime detection. Use the GMVDetectorImageOrientation
+ *  key to specify the image orientation.
+ *  @return The array of detected features, or nil if an error occurred.
+ */
+- (nullable NSArray<__kindof GMVFeature *> *)featuresInBuffer:(CMSampleBufferRef)sampleBuffer
+                                                      options:(nullable NSDictionary *)options;
+
+@end
+
+NS_ASSUME_NONNULL_END
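+
+// Example (editor's sketch): running face detection on a UIImage named
+// `image`. GMVDetectorTypeFace, GMVDetectorFaceMode, and
+// GMVDetectorFaceAccurateMode are assumed from GMVDetectorConstants.h.
+//
+//   NSDictionary *options =
+//       @{ GMVDetectorFaceMode : @(GMVDetectorFaceAccurateMode) };
+//   GMVDetector *detector =
+//       [GMVDetector detectorOfType:GMVDetectorTypeFace options:options];
+//   NSArray<__kindof GMVFeature *> *faces =
+//       [detector featuresInImage:image options:nil];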

+ 461 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVDetectorConstants.h

@@ -0,0 +1,461 @@
+#ifndef GMVDetector_GMVDetectorConstants_h
+#define GMVDetector_GMVDetectorConstants_h
+
+#import <Foundation/Foundation.h>
+
+/** @file GMVDetectorConstants.h
+ *  Detector constants.
+ */
+
+/** Possible error codes returned by GMVDetector. */
+typedef NS_ENUM(NSInteger, GMVDetectorError) {
+  GMVDetectorInvalidInput = -301
+};
+
+/**
+ * @enum GMVImageOrientation
+ * This enumeration specifies where the origin (0,0) of the image is located. The constant
+ * has the same value as defined by EXIF specifications.
+ */
+typedef NS_ENUM(NSInteger, GMVImageOrientation) {
+  /**
+   * Orientation code indicating the 0th row is the top and the 0th column is the left side.
+   */
+  GMVImageOrientationTopLeft = 1,
+  /**
+   * Orientation code indicating the 0th row is the top and the 0th column is the right side.
+   */
+  GMVImageOrientationTopRight = 2,
+  /**
+   * Orientation code indicating the 0th row is the bottom and the 0th column is the right side.
+   */
+  GMVImageOrientationBottomRight = 3,
+  /**
+   * Orientation code indicating the 0th row is the bottom and the 0th column is the left side.
+   */
+  GMVImageOrientationBottomLeft = 4,
+  /**
+   * Orientation code indicating the 0th row is the left side and the 0th column is the top.
+   */
+  GMVImageOrientationLeftTop = 5,
+  /**
+   * Orientation code indicating the 0th row is the right side and the 0th column is the top.
+   */
+  GMVImageOrientationRightTop = 6,
+  /**
+   * Orientation code indicating the 0th row is the right side and the 0th column is the bottom.
+   */
+  GMVImageOrientationRightBottom = 7,
+  /**
+   * Orientation code indicating the 0th row is the left side and the 0th column is the
+   * bottom.
+   */
+  GMVImageOrientationLeftBottom = 8
+};
+
+/**
+ * @enum GMVDetectorFaceModeOption
+ * This enum specifies a preference for accuracy vs. speed trade-offs.
+ */
+typedef NS_ENUM(NSInteger, GMVDetectorFaceModeOption) {
+  /**
+   * Face detection mode code indicating the detector may find fewer faces and be less precise
+   * in determining values such as position, but will run faster.
+   */
+  GMVDetectorFaceFastMode = 200,
+  /**
+   * Face detection mode code indicating the detector may find more faces and be more precise
+   * in determining values such as position, at the cost of speed.
+   */
+  GMVDetectorFaceAccurateMode = 201,
+  /**
+   *  Face detection mode code indicating the detector targets the predominant faces that
+   *  appear in selfie-style photography, and may not detect smaller, more distant faces.
+   */
+  GMVDetectorFaceSelfieMode = 202
+};
+
+/**
+ * @options GMVDetectorFaceLandmark
+ * This option specifies the landmark detection type.
+ */
+typedef NS_OPTIONS(NSInteger, GMVDetectorFaceLandmark) {
+  /**
+   * Face landmark option indicating it performs no landmark detection.
+   */
+  GMVDetectorFaceLandmarkNone = 1 << 0,
+  /**
+   * Face landmark option indicating it performs all landmark detection.
+   */
+  GMVDetectorFaceLandmarkAll = 1 << 1,
+  /**
+   * Face landmark option indicating it performs contour detection.
+   */
+  GMVDetectorFaceLandmarkContour = 1 << 2
+};
+
+/**
+ * @options GMVDetectorFaceClassification
+ * This option specifies the classification type.
+ */
+typedef NS_OPTIONS(NSInteger, GMVDetectorFaceClassification) {
+  /**
+   * Face classification option indicating it performs no classification.
+   */
+  GMVDetectorFaceClassificationNone = 1 << 0,
+  /**
+   * Face classification option indicating it performs all classification.
+   */
+  GMVDetectorFaceClassificationAll = 1 << 1
+};
+
+/** This value is the default score threshold set on label detectors. */
+extern const float kGMVDetectorLabelScoreThresholdDefaultValue;
+
+/**
+ * @enum GMVBarcodeFeatureEmailType
+ * This enum specifies the email type for GMVBarcodeFeatureEmail.
+ */
+typedef NS_ENUM(NSInteger, GMVBarcodeFeatureEmailType) {
+  /**
+   * Unknown email type.
+   */
+  GMVBarcodeFeatureEmailTypeUnknown = 0,
+  /**
+   * Barcode feature work email type.
+   */
+  GMVBarcodeFeatureEmailTypeWork = 1,
+  /**
+   * Barcode feature home email type.
+   */
+  GMVBarcodeFeatureEmailTypeHome = 2
+};
+
+/**
+ * @enum GMVBarcodeFeaturePhoneType
+ * This enum specifies the phone type for GMVBarcodeFeaturePhone.
+ */
+typedef NS_ENUM(NSInteger, GMVBarcodeFeaturePhoneType) {
+  /**
+   * Unknown phone type.
+   */
+  GMVBarcodeFeaturePhoneTypeUnknown = 0,
+  /**
+   * Barcode feature work phone type.
+   */
+  GMVBarcodeFeaturePhoneTypeWork = 1,
+  /**
+   * Barcode feature home phone type.
+   */
+  GMVBarcodeFeaturePhoneTypeHome = 2,
+  /**
+   * Barcode feature fax phone type.
+   */
+  GMVBarcodeFeaturePhoneTypeFax = 3,
+  /**
+   * Barcode feature mobile phone type.
+   */
+  GMVBarcodeFeaturePhoneTypeMobile = 4
+};
+
+/**
+ * @enum GMVBarcodeFeatureWiFiEncryptionType
+ * This enum specifies the Wi-Fi encryption type for GMVBarcodeFeatureWiFi.
+ */
+typedef NS_ENUM(NSInteger, GMVBarcodeFeatureWiFiEncryptionType) {
+  /**
+   * Barcode feature unknown Wi-Fi encryption type.
+   */
+  GMVBarcodeFeatureWiFiEncryptionTypeUnknown = 0,
+  /**
+   * Barcode feature open Wi-Fi encryption type.
+   */
+  GMVBarcodeFeatureWiFiEncryptionTypeOpen = 1,
+  /**
+   * Barcode feature WPA Wi-Fi encryption type.
+   */
+  GMVBarcodeFeatureWiFiEncryptionTypeWPA = 2,
+  /**
+   * Barcode feature WEP Wi-Fi encryption type.
+   */
+  GMVBarcodeFeatureWiFiEncryptionTypeWEP = 3
+};
+
+/**
+ * @enum GMVBarcodeFeatureAddressType
+ * This enum specifies address type.
+ */
+typedef NS_ENUM(NSInteger, GMVBarcodeFeatureAddressType) {
+  /**
+   * Barcode feature unknown address type.
+   */
+  GMVBarcodeFeatureAddressTypeUnknown = 0,
+  /**
+   * Barcode feature work address type.
+   */
+  GMVBarcodeFeatureAddressTypeWork = 1,
+  /**
+   * Barcode feature home address type.
+   */
+  GMVBarcodeFeatureAddressTypeHome = 2
+};
+
+/**
+ * @enum GMVDetectorBarcodeValueFormat
+ * This enum specifies a barcode's value format. For example, TEXT, PRODUCT, URL, etc.
+ */
+typedef NS_ENUM(NSInteger, GMVDetectorBarcodeValueFormat) {
+  /**
+   * Barcode value format for contact info.
+   */
+  GMVDetectorBarcodeValueFormatContactInfo = 1,
+  /**
+   * Barcode value format for email addresses.
+   */
+  GMVDetectorBarcodeValueFormatEmail = 2,
+  /**
+   * Barcode value format for ISBNs.
+   */
+  GMVDetectorBarcodeValueFormatISBN = 3,
+  /**
+   * Barcode value format for phone numbers.
+   */
+  GMVDetectorBarcodeValueFormatPhone = 4,
+  /**
+   * Barcode value format for product codes.
+   */
+  GMVDetectorBarcodeValueFormatProduct = 5,
+  /**
+   * Barcode value format for SMS details.
+   */
+  GMVDetectorBarcodeValueFormatSMS = 6,
+  /**
+   * Barcode value format for plain text.
+   */
+  GMVDetectorBarcodeValueFormatText = 7,
+  /**
+   * Barcode value format for URLs/bookmarks.
+   */
+  GMVDetectorBarcodeValueFormatURL = 8,
+  /**
+   * Barcode value format for Wi-Fi access point details.
+   */
+  GMVDetectorBarcodeValueFormatWiFi = 9,
+  /**
+   * Barcode value format for geographic coordinates.
+   */
+  GMVDetectorBarcodeValueFormatGeographicCoordinates = 10,
+  /**
+   * Barcode value format for calendar events.
+   */
+  GMVDetectorBarcodeValueFormatCalendarEvent = 11,
+  /**
+   * Barcode value format for driver's license data.
+   */
+  GMVDetectorBarcodeValueFormatDriversLicense = 12
+};
+
+/**
+ * @options GMVDetectorBarcodeFormat
+ * This option specifies the barcode formats that the library should detect.
+ */
+typedef NS_OPTIONS(NSInteger, GMVDetectorBarcodeFormat) {
+  /**
+   * Code-128 detection.
+   */
+  GMVDetectorBarcodeFormatCode128 = 0x0001,
+  /**
+   * Code-39 detection.
+   */
+  GMVDetectorBarcodeFormatCode39 = 0x0002,
+  /**
+   * Code-93 detection.
+   */
+  GMVDetectorBarcodeFormatCode93 = 0x0004,
+  /**
+   * Codabar detection.
+   */
+  GMVDetectorBarcodeFormatCodaBar = 0x0008,
+  /**
+   * Data Matrix detection.
+   */
+  GMVDetectorBarcodeFormatDataMatrix = 0x0010,
+  /**
+   * EAN-13 detection.
+   */
+  GMVDetectorBarcodeFormatEAN13 = 0x0020,
+  /**
+   * EAN-8 detection.
+   */
+  GMVDetectorBarcodeFormatEAN8 = 0x0040,
+  /**
+   * ITF detection.
+   */
+  GMVDetectorBarcodeFormatITF = 0x0080,
+  /**
+   * QR Code detection.
+   */
+  GMVDetectorBarcodeFormatQRCode = 0x0100,
+  /**
+   * UPC-A detection.
+   */
+  GMVDetectorBarcodeFormatUPCA = 0x0200,
+  /**
+   * UPC-E detection.
+   */
+  GMVDetectorBarcodeFormatUPCE = 0x0400,
+  /**
+   * PDF-417 detection.
+   */
+  GMVDetectorBarcodeFormatPDF417 = 0x0800,
+  /**
+   * Aztec code detection.
+   */
+  GMVDetectorBarcodeFormatAztec = 0x1000
+};
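+
+// Usage sketch (illustrative, not part of the shipped header): the format flags above form a
+// bitmask, so several formats can be OR'ed together in the detector options. GMVDetector and
+// its detectorOfType:options: factory are assumed from GMVDetector.h, the companion header in
+// this framework.
+//
+//   NSDictionary *options = @{
+//     GMVDetectorBarcodeFormats : @(GMVDetectorBarcodeFormatQRCode |
+//                                   GMVDetectorBarcodeFormatEAN13)
+//   };
+//   GMVDetector *barcodeDetector =
+//       [GMVDetector detectorOfType:GMVDetectorTypeBarcode options:options];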
+
+#pragma mark - Detector type constants
+
+/**
+ * @memberof GMVDetector
+ * A detector that searches for faces in a still image or video, returning GMVFaceFeature
+ * objects that provide information about detected faces.
+ */
+extern NSString * const GMVDetectorTypeFace;
+
+/**
+ * @memberof GMVDetector
+ * A detector that searches for barcodes in a still image or video, returning GMVBarcodeFeature
+ * objects that provide information about detected barcodes.
+ */
+extern NSString * const GMVDetectorTypeBarcode;
+
+/**
+ * @memberof GMVDetector
+ * A detector that does optical character recognition in a still image or video, returning
+ * GMVTextBlockFeature objects that provide information about detected text.
+ */
+extern NSString * const GMVDetectorTypeText;
+
+/**
+ * @memberof GMVDetector
+ * A detector that classifies a still image, returning GMVLabelFeature objects that provide
+ * information about detected labels.
+ */
+extern NSString * const GMVDetectorTypeLabel;
+
+#pragma mark - Label Detector Configuration Keys
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify the score threshold for labels returned by the label detector, a float
+ * value between 0 and 1.
+ *
+ * All features returned by the label detector have a score greater than or equal to this
+ * threshold.
+ * If unset, a default value of kGMVDetectorLabelScoreThresholdDefaultValue is used.
+ */
+extern NSString * const GMVDetectorLabelScoreThreshold;
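+
+// Usage sketch (illustrative): the threshold is passed as an NSNumber-wrapped float in the
+// detector options; detectorOfType:options: is assumed from GMVDetector.h.
+//
+//   NSDictionary *options = @{ GMVDetectorLabelScoreThreshold : @(0.75f) };
+//   GMVDetector *labelDetector =
+//       [GMVDetector detectorOfType:GMVDetectorTypeLabel options:options];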
+
+#pragma mark - Barcode Detector Configuration Keys
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify the barcode detection formats. If not specified, defaults to
+ * GMVDetectorBarcodeFormatAllFormats.
+ */
+extern NSString * const GMVDetectorBarcodeFormats;
+
+#pragma mark - Face Detector Configuration Keys
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify the detector's accuracy/speed trade-off. If not specified, defaults to
+ * GMVDetectorFaceFastMode.
+ */
+extern NSString * const GMVDetectorFaceMode;
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify whether the face tracking feature is enabled. If not specified,
+ * defaults to false.
+ */
+extern NSString * const GMVDetectorFaceTrackingEnabled;
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify the smallest desired face size. The size is expressed as a proportion
+ * of the width of the head to the image width. For example, if a value of 0.1 is specified, then
+ * the smallest face to search for is roughly 10% of the width of the image being searched.
+ * If not specified, defaults to 0.1.
+ */
+extern NSString * const GMVDetectorFaceMinSize;
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify whether to run additional classifiers for characterizing attributes
+ * such as smiling. If not specified, defaults to GMVDetectorFaceClassificationNone.
+ */
+extern NSString * const GMVDetectorFaceClassificationType;
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify whether to detect no landmarks or all landmarks. Processing time
+ * increases as the number of landmarks to search for increases, so detecting all landmarks
+ * will increase the overall detection time. If not specified, defaults to
+ * GMVDetectorFaceLandmarkNone.
+ */
+extern NSString * const GMVDetectorFaceLandmarkType;
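+
+// Usage sketch (illustrative): the face keys above combine into a single options dictionary,
+// with enum values boxed in NSNumbers.
+//
+//   NSDictionary *options = @{
+//     GMVDetectorFaceMode : @(GMVDetectorFaceAccurateMode),
+//     GMVDetectorFaceLandmarkType : @(GMVDetectorFaceLandmarkAll),
+//     GMVDetectorFaceClassificationType : @(GMVDetectorFaceClassificationAll),
+//     GMVDetectorFaceTrackingEnabled : @YES,
+//     GMVDetectorFaceMinSize : @(0.2f)
+//   };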
+
+#pragma mark - Detector Detection Configuration Keys
+
+/**
+ * @memberof GMVDetector
+ * A key used to specify the display orientation of the image for face feature detection. The
+ * value of this key is an NSNumber wrapping a GMVImageOrientation.
+ */
+extern NSString * const GMVDetectorImageOrientation;
+
+#pragma mark - Feature Types
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a person’s face. Use the GMVFaceFeature class to get more
+ * information about the detected feature.
+ */
+extern NSString * const GMVFeatureTypeFace;
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a barcode. Use the GMVBarcodeFeature class to get more
+ * information about the detected feature.
+ */
+extern NSString * const GMVFeatureTypeBarcode;
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a text block. Use the GMVTextBlockFeature class to get more
+ * information about the detected feature.
+ */
+extern NSString * const GMVFeatureTypeTextBlock;
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a text line. Use the GMVTextLineFeature class to get more
+ * information about the detected feature.
+ */
+extern NSString * const GMVFeatureTypeTextLine;
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a text element. Use the GMVTextElementFeature class to get more
+ * information about the detected feature.
+ */
+extern NSString * const GMVFeatureTypeTextElement;
+
+/**
+ * @memberof GMVFeature
+ * The discovered feature is a label. Use the GMVLabelFeature class to get more information about
+ * the detected feature.
+ */
+extern NSString * const GMVFeatureTypeLabel;
+
+#endif  // GMVDetector_GMVDetectorConstants_h

+ 874 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVFeature.h

@@ -0,0 +1,874 @@
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "GMVDetectorConstants.h"
+
+/**
+ * Generic feature returned by a GMVDetector.
+ */
+@interface GMVFeature : NSObject
+
+/**
+ * The rectangle that holds the discovered feature relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGRect bounds;
+
+/**
+ * The type of feature that was discovered.
+ */
+@property(atomic, copy, readonly) NSString *type;
+
+/**
+ * Indicates whether the object has a tracking ID.
+ */
+@property(atomic, assign, readonly) BOOL hasTrackingID;
+
+/**
+ * The tracking identifier of the feature. This ID is not associated with a specific feature
+ * but identifies the same feature among consecutive video frames.
+ */
+@property(atomic, assign, readonly) NSUInteger trackingID;
+
+@end
+
+/**
+ * An email message from a 'MAILTO:' or similar QR Code type.
+ */
+@interface GMVBarcodeFeatureEmail : NSObject
+/**
+ * Email message address.
+ */
+@property(atomic, copy, readonly) NSString *address;
+
+/**
+ * Email message body.
+ */
+@property(atomic, copy, readonly) NSString *body;
+
+/**
+ * Email message subject.
+ */
+@property(atomic, copy, readonly) NSString *subject;
+
+/**
+ * Email message type.
+ */
+@property(atomic, assign, readonly) GMVBarcodeFeatureEmailType type;
+
+@end
+
+/**
+ * A phone number from a 'TEL:' or similar QR Code type.
+ */
+@interface GMVBarcodeFeaturePhone : NSObject
+
+/**
+ * Phone number.
+ */
+@property(atomic, copy, readonly) NSString *number;
+
+/**
+ * Phone number type.
+ */
+@property(atomic, assign, readonly) GMVBarcodeFeaturePhoneType type;
+
+@end
+
+/**
+ * An SMS message from an 'SMS:' or similar QR Code type.
+ */
+@interface GMVBarcodeFeatureSMS : NSObject
+
+/**
+ * An SMS message body.
+ */
+@property(atomic, copy, readonly) NSString *message;
+
+/**
+ * An SMS message phone number.
+ */
+@property(atomic, copy, readonly) NSString *phoneNumber;
+
+@end
+
+/**
+ * A URL and title from a 'MEBKM:' or similar QR Code type.
+ */
+@interface GMVBarcodeFeatureURLBookmark : NSObject
+
+/**
+ * A URL bookmark title.
+ */
+@property(atomic, copy, readonly) NSString *title;
+
+/**
+ * A URL bookmark url.
+ */
+@property(atomic, copy, readonly) NSString *url;
+
+@end
+
+/**
+ * Wi-Fi network parameters from a 'WIFI:' or similar QR Code type.
+ */
+@interface GMVBarcodeFeatureWiFi : NSObject
+
+/**
+ * A Wi-Fi access point SSID.
+ */
+@property(atomic, copy, readonly) NSString *ssid;
+
+/**
+ * A Wi-Fi access point password.
+ */
+@property(atomic, copy, readonly) NSString *password;
+
+/**
+ * A Wi-Fi access point encryption type.
+ */
+@property(atomic, assign, readonly) GMVBarcodeFeatureWiFiEncryptionType type;
+
+@end
+
+/**
+ * GPS coordinates from a 'GEO:' or similar QR Code type data.
+ */
+@interface GMVBarcodeFeatureGeoPoint : NSObject
+/**
+ * A location latitude.
+ */
+@property(atomic, assign, readonly) double latitude;
+
+/**
+ * A location longitude.
+ */
+@property(atomic, assign, readonly) double longitude;
+
+@end
+
+/**
+ * An address.
+ */
+@interface GMVBarcodeFeatureAddress : NSObject
+
+/**
+ * Formatted address, containing multiple lines when appropriate.
+ */
+@property(atomic, copy, readonly) NSArray<NSString *> *addressLines;
+
+/**
+ * Address type.
+ */
+@property(atomic, assign, readonly) GMVBarcodeFeatureAddressType type;
+
+@end
+
+/**
+ * A person's name, both formatted and as individual name components.
+ */
+@interface GMVBarcodeFeaturePersonName : NSObject
+
+/**
+ * Properly formatted name.
+ */
+@property(atomic, copy, readonly) NSString *formattedName;
+
+/**
+ * First name.
+ */
+@property(atomic, copy, readonly) NSString *first;
+
+/**
+ * Last name.
+ */
+@property(atomic, copy, readonly) NSString *last;
+
+/**
+ * Middle name.
+ */
+@property(atomic, copy, readonly) NSString *middle;
+
+/**
+ * Name prefix.
+ */
+@property(atomic, copy, readonly) NSString *prefix;
+
+/**
+ * Designates a text string to be set as the kana name in the phonebook.
+ * Used for Japanese contacts.
+ */
+@property(atomic, copy, readonly) NSString *pronounciation;
+
+/**
+ * Name suffix.
+ */
+@property(atomic, copy, readonly) NSString *suffix;
+
+@end
+
+/**
+ * A person's or organization's business card. For example, a vCard.
+ */
+@interface GMVBarcodeFeatureContactInfo : NSObject
+
+/**
+ * Person's or organization's addresses.
+ */
+@property(atomic, copy, readonly) NSArray<GMVBarcodeFeatureAddress *> *addresses;
+
+/**
+ * Contact emails.
+ */
+@property(atomic, copy, readonly) NSArray<GMVBarcodeFeatureEmail *> *emails;
+
+/**
+ * A person's name.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeaturePersonName *name;
+
+/**
+ * Contact phone numbers.
+ */
+@property(atomic, copy, readonly) NSArray<GMVBarcodeFeaturePhone *> *phones;
+
+/**
+ * Contact URLs.
+ */
+@property(atomic, copy, readonly) NSArray<NSString *> *urls;
+
+/**
+ * Job title.
+ */
+@property(atomic, copy, readonly) NSString *jobTitle;
+
+/**
+ * Business organization.
+ */
+@property(atomic, copy, readonly) NSString *organization;
+
+@end
+
+/**
+ * A calendar event extracted from a QR code.
+ */
+@interface GMVBarcodeFeatureCalendarEvent : NSObject
+
+/**
+ * Calendar event description.
+ */
+@property(atomic, copy, readonly) NSString *eventDescription;
+
+/**
+ * Calendar event location.
+ */
+@property(atomic, copy, readonly) NSString *location;
+
+/**
+ * Calendar event organizer.
+ */
+@property(atomic, copy, readonly) NSString *organizer;
+
+/**
+ * Calendar event status.
+ */
+@property(atomic, copy, readonly) NSString *status;
+
+/**
+ * Calendar event summary.
+ */
+@property(atomic, copy, readonly) NSString *summary;
+
+/**
+ * Calendar event start date.
+ */
+@property(atomic, strong, readonly) NSDate *start;
+
+/**
+ * Calendar event end date.
+ */
+@property(atomic, strong, readonly) NSDate *end;
+
+@end
+
+/**
+ * A driver license or ID card data representation.
+ */
+@interface GMVBarcodeFeatureDriverLicense : NSObject
+
+/**
+ * Holder's first name.
+ */
+@property(atomic, copy, readonly) NSString *firstName;
+
+/**
+ * Holder's middle name.
+ */
+@property(atomic, copy, readonly) NSString *middleName;
+
+/**
+ * Holder's last name.
+ */
+@property(atomic, copy, readonly) NSString *lastName;
+
+/**
+ * Holder's gender. 1 is male and 2 is female.
+ */
+@property(atomic, copy, readonly) NSString *gender;
+
+/**
+ * Holder's city address.
+ */
+@property(atomic, copy, readonly) NSString *addressCity;
+
+/**
+ * Holder's state address.
+ */
+@property(atomic, copy, readonly) NSString *addressState;
+
+/**
+ * Holder's street address.
+ */
+@property(atomic, copy, readonly) NSString *addressStreet;
+
+/**
+ * Holder's address' zipcode.
+ */
+@property(atomic, copy, readonly) NSString *addressZip;
+
+/**
+ * Holder's birthday. The date format depends on the issuing country.
+ */
+@property(atomic, copy, readonly) NSString *birthDate;
+
+/**
+ * "DL" for driver licenses, "ID" for ID cards.
+ */
+@property(atomic, copy, readonly) NSString *documentType;
+
+/**
+ * Driver license ID number.
+ */
+@property(atomic, copy, readonly) NSString *licenseNumber;
+
+/**
+ * Driver license expiration date. The date format depends on the issuing country.
+ */
+@property(atomic, copy, readonly) NSString *expiryDate;
+
+/**
+ * Driver license issuing date. The date format depends on the issuing country.
+ */
+@property(atomic, copy, readonly) NSString *issuingDate;
+
+/**
+ * Country in which DL/ID was issued.
+ */
+@property(atomic, copy, readonly) NSString *issuingCountry;
+
+@end
+
+/**
+ * Describes a barcode detected in a still image frame. Its properties provide barcode value
+ * information.
+ */
+@interface GMVBarcodeFeature : GMVFeature
+
+/**
+ * Barcode value as it was encoded in the barcode. Structured values are not parsed, for example:
+ * 'MEBKM:TITLE:Google;URL:https://www.google.com;;'. Does not include the supplemental value.
+ */
+@property(atomic, copy, readonly) NSString *rawValue;
+
+/**
+ * Barcode value in a user-friendly format. May omit some of the information encoded in the
+ * barcode. For example, in the case above displayValue might be 'https://www.google.com'.
+ * If valueFormat==TEXT, this field will be equal to rawValue. This value may be multiline,
+ * for example, when line breaks are encoded into the original TEXT barcode value. May include
+ * the supplement value.
+ */
+@property(atomic, copy, readonly) NSString *displayValue;
+
+/**
+ * Barcode format; for example, EAN_13. Note that this field may contain values not present in the
+ * current set of format constants. When mapping this value to something else, it is advisable
+ * to have a default/fallback case.
+ */
+@property(atomic, assign, readonly) GMVDetectorBarcodeFormat format;
+
+/**
+ * The four corner points of the barcode, in clockwise order starting with the top left relative
+ * to the detected image in the view coordinate system. These are CGPoints boxed in NSValues.
+ * Due to the possible perspective distortions, this is not necessarily a rectangle.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * Format of the barcode value. For example, TEXT, PRODUCT, URL, etc. Note that this field may
+ * contain values not present in the current set of value format constants. When mapping this
+ * value to something else, it is advisable to have a default/fallback case.
+ */
+@property(atomic, assign, readonly) GMVDetectorBarcodeValueFormat valueFormat;
+
+/**
+ * An email message from a 'MAILTO:' or similar QR Code type. This property is only set if
+ * valueFormat is GMVDetectorBarcodeValueFormatEmail.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureEmail *email;
+
+/**
+ * A phone number from a 'TEL:' or similar QR Code type. This property is only set if valueFormat
+ * is GMVDetectorBarcodeValueFormatPhone.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeaturePhone *phone;
+
+/**
+ * An SMS message from an 'SMS:' or similar QR Code type. This property is only set if valueFormat
+ * is GMVDetectorBarcodeValueFormatSMS.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureSMS *sms;
+
+/**
+ * A URL and title from a 'MEBKM:' or similar QR Code type. This property is only set if
+ * valueFormat is GMVDetectorBarcodeValueFormatURL.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureURLBookmark *url;
+
+/**
+ * Wi-Fi network parameters from a 'WIFI:' or similar QR Code type. This property is only set
+ * if valueFormat is GMVDetectorBarcodeValueFormatWiFi.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureWiFi *wifi;
+
+/**
+ * GPS coordinates from a 'GEO:' or similar QR Code type. This property is only set if
+ * valueFormat is GMVDetectorBarcodeValueFormatGeographicCoordinates.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureGeoPoint *geoPoint;
+
+/**
+ * A person's or organization's business card. For example, a vCard. This property is only set
+ * if valueFormat is GMVDetectorBarcodeValueFormatContactInfo.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureContactInfo *contactInfo;
+
+/**
+ * A calendar event extracted from a QR Code. This property is only set if valueFormat is
+ * GMVDetectorBarcodeValueFormatCalendarEvent.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureCalendarEvent *calendarEvent;
+
+/**
+ * A driver license or ID card. This property is only set if valueFormat is
+ * GMVDetectorBarcodeValueFormatDriversLicense.
+ */
+@property(atomic, strong, readonly) GMVBarcodeFeatureDriverLicense *driverLicense;
+
+@end
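+
+// Usage sketch (illustrative): the typed payload properties above are populated according to
+// valueFormat, so callers should switch on it before reading a payload. |features| is assumed
+// to come from a GMVDetector of type GMVDetectorTypeBarcode.
+//
+//   for (GMVBarcodeFeature *barcode in features) {
+//     switch (barcode.valueFormat) {
+//       case GMVDetectorBarcodeValueFormatWiFi:
+//         NSLog(@"Wi-Fi SSID: %@", barcode.wifi.ssid);
+//         break;
+//       case GMVDetectorBarcodeValueFormatURL:
+//         NSLog(@"URL: %@", barcode.url.url);
+//         break;
+//       default:
+//         NSLog(@"Raw value: %@", barcode.rawValue);
+//         break;
+//     }
+//   }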
+
+/**
+ * Describes a single element in a line of detected text. An "element" is roughly equivalent to a
+ * space-separated "word" in most Latin-script languages.
+ */
+@interface GMVTextElementFeature : GMVFeature
+
+/**
+ * Text contained in this element, in string form.
+ */
+@property(atomic, copy, readonly) NSString *value;
+
+/**
+ * The four corner points of the text element, in clockwise order starting with the top left
+ * relative to the detected image in the view coordinate system. These are CGPoints boxed in
+ * NSValues.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *cornerPoints;
+
+@end
+
+/**
+ * Describes a single line of detected text.
+ */
+@interface GMVTextLineFeature : GMVFeature
+
+/**
+ * Text contained in this text line, in string form.
+ */
+@property(atomic, copy, readonly) NSString *value;
+
+/**
+ * The prevailing language in the text line. The format is the ISO 639-1 two-letter language code if
+ * that is defined (e.g. "en"), or else the ISO 639-2 three-letter code if that is defined.
+ */
+@property(atomic, copy, readonly) NSString *language;
+
+/**
+ * The four corner points of the text line, in clockwise order starting with the top left relative
+ * to the detected image in the view coordinate system. These are CGPoints boxed in NSValues.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * Text elements in this line.
+ */
+@property(atomic, copy, readonly) NSArray<GMVTextElementFeature *> *elements;
+
+@end
+
+/**
+ * Describes a text block detected in a still image frame. Its properties provide details
+ * about detected text.
+ */
+@interface GMVTextBlockFeature : GMVFeature
+
+/**
+ * Text contained in the text block, in string form.
+ */
+@property(atomic, copy, readonly) NSString *value;
+
+/**
+ * The prevailing language in the text block. The format is the ISO 639-1 two-letter language code
+ * if that is defined (e.g. "en"), or else the ISO 639-2 three-letter code if that is defined.
+ */
+@property(atomic, copy, readonly) NSString *language;
+
+/**
+ * The four corner points of the text block, in clockwise order starting with the top left relative
+ * to the detected image in the view coordinate system. These are CGPoints boxed in NSValues.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *cornerPoints;
+
+/**
+ * The contents of the text block, broken down into individual lines.
+ */
+@property(atomic, copy, readonly) NSArray<GMVTextLineFeature *> *lines;
+
+@end
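+
+// Usage sketch (illustrative): text results form a block > line > element hierarchy, so a full
+// transcript walk nests three loops. |textBlocks| is assumed to come from a GMVDetector of type
+// GMVDetectorTypeText.
+//
+//   for (GMVTextBlockFeature *block in textBlocks) {
+//     for (GMVTextLineFeature *line in block.lines) {
+//       for (GMVTextElementFeature *element in line.elements) {
+//         NSLog(@"word %@ at %@", element.value,
+//               NSStringFromCGRect(element.bounds));
+//       }
+//     }
+//   }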
+
+/**
+ * Describes facial contours in a still image frame. A facial contour is a set of points that
+ * outlines a facial landmark or region.
+ */
+@interface GMVFaceContour : NSObject
+
+/**
+ * All contour points.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *allPoints;
+/**
+ * A set of points that outlines the face oval, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *faceContour;
+/**
+ * A set of points that outlines the top of the left eyebrow, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *topLeftEyebrowContour;
+/**
+ * A set of points that outlines the bottom of the left eyebrow, relative to the detected image
+ * in the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *bottomLeftEyebrowContour;
+/**
+ * A set of points that outlines the top of the right eyebrow, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *topRightEyebrowContour;
+/**
+ * A set of points that outlines the bottom of the right eyebrow, relative to the detected image
+ * in the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *bottomRightEyebrowContour;
+/**
+ * A set of points that outlines the left eye, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *leftEyeContour;
+/**
+ * A set of points that outlines the right eye, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *rightEyeContour;
+/**
+ * A set of points that outlines the top of the upper lip, relative to the detected image in the
+ * view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *topUpperLipContour;
+/**
+ * A set of points that outlines the bottom of the upper lip, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *bottomUpperLipContour;
+/**
+ * A set of points that outlines the top of the lower lip, relative to the detected image in the
+ * view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *topLowerLipContour;
+/**
+ * A set of points that outlines the bottom of the lower lip, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *bottomLowerLipContour;
+/**
+ * A set of points that outlines the nose bridge, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *noseBridgeContour;
+/**
+ * A set of points that outlines the bottom of the nose, relative to the detected image in the
+ * view coordinate system.
+ */
+@property(atomic, copy, readonly) NSArray<NSValue *> *bottomNoseContour;
+
+@end
+
+/**
+ * Describes a face detected in a still image frame. Its properties provide face
+ * landmark information.
+ */
+@interface GMVFaceFeature : GMVFeature
+
+#pragma mark - Head properties
+
+/**
+ * Indicates whether the detector found the head Y Euler angle.
+ */
+@property(atomic, assign, readonly) BOOL hasHeadEulerAngleY;
+
+/**
+ * Indicates the rotation of the face about the vertical axis of the image.
+ * A positive Euler Y angle means the face is turned toward the right side of the image being
+ * processed.
+ */
+@property(atomic, assign, readonly) CGFloat headEulerAngleY;
+
+/**
+ * Indicates whether the detector found the head Z Euler angle.
+ */
+@property(atomic, assign, readonly) BOOL hasHeadEulerAngleZ;
+
+/**
+ * Indicates the rotation of the face about the axis pointing out of the image.
+ * A positive Euler Z angle is a counter-clockwise rotation within the image plane.
+ */
+@property(atomic, assign, readonly) CGFloat headEulerAngleZ;
+
+/**
+ * Indicates the rotation of the face about the horizontal axis.
+ * A positive Euler X angle means the face is looking up.
+ */
+@property(atomic, assign, readonly) CGFloat headEulerAngleX;
+
+/**
+ * Indicates whether the detector found the head X Euler angle.
+ */
+@property(atomic, assign, readonly) BOOL hasHeadEulerAngleX;
+
+#pragma mark - Mouth properties
+
+/**
+ * Indicates whether the detector found the face’s mouth corner where the
+ * lips meet.
+ */
+@property(atomic, assign, readonly) BOOL hasMouthPosition;
+
+/**
+ * The coordinates of the mouth corner where the lips meet, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint mouthPosition;
+
+/**
+ * Indicates whether the detector found the face's bottom lip center.
+ */
+@property(atomic, assign, readonly) BOOL hasBottomMouthPosition;
+
+/**
+ * The coordinates of the bottom lip center, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint bottomMouthPosition;
+
+/**
+ * Indicates whether the detector found the face's right mouth corner.
+ */
+@property(atomic, assign, readonly) BOOL hasRightMouthPosition;
+
+/**
+ * The coordinates of the right mouth corner, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint rightMouthPosition;
+
+/**
+ * Indicates whether the detector found the face's left mouth corner.
+ */
+@property(atomic, assign, readonly) BOOL hasLeftMouthPosition;
+
+/**
+ * The coordinates of the left mouth corner, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint leftMouthPosition;
+
+#pragma mark - Ear properties
+
+/**
+ * Indicates whether the detector found the midpoint between the face's left ear tip and left
+ * ear lobe.
+ */
+@property(atomic, assign, readonly) BOOL hasLeftEarPosition;
+
+/**
+ * The coordinates of the midpoint between the face's left ear tip and left ear lobe, relative
+ * to the detected image in the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint leftEarPosition;
+
+/**
+ * Indicates whether the detector found the midpoint between the face's right ear tip and right
+ * ear lobe.
+ */
+@property(atomic, assign, readonly) BOOL hasRightEarPosition;
+
+/**
+ * The coordinates of the midpoint between the face's right ear tip and right ear lobe, relative
+ * to the detected image in the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint rightEarPosition;
+
+#pragma mark - Eye properties
+
+/**
+ * Indicates whether the detector found the face’s left eye.
+ */
+@property(atomic, assign, readonly) BOOL hasLeftEyePosition;
+
+/**
+ * The coordinates of the left eye, relative to the detected image in the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint leftEyePosition;
+
+/**
+ * Indicates whether the detector found the face’s right eye.
+ */
+@property(atomic, assign, readonly) BOOL hasRightEyePosition;
+
+/**
+ * The coordinates of the right eye, relative to the detected image in the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint rightEyePosition;
+
+#pragma mark - Cheek properties
+
+/**
+ * Indicates whether the detector found the face's left cheek.
+ */
+@property(atomic, assign, readonly) BOOL hasLeftCheekPosition;
+
+/**
+ * The coordinates of the left cheek, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint leftCheekPosition;
+
+/**
+ * Indicates whether the detector found the face's right cheek.
+ */
+@property(atomic, assign, readonly) BOOL hasRightCheekPosition;
+
+/**
+ * The coordinates of the right cheek, relative to the detected image in the view
+ * coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint rightCheekPosition;
+
+#pragma mark - Nose properties
+
+/**
+ * Indicates whether the detector found the midpoint between the face's
+ * nostrils where the nose meets the face.
+ */
+@property(atomic, assign, readonly) BOOL hasNoseBasePosition;
+
+/**
+ * The coordinates of the midpoint between the nostrils, relative to the detected image in
+ * the view coordinate system.
+ */
+@property(atomic, assign, readonly) CGPoint noseBasePosition;
+
+#pragma mark - Classifier properties
+
+/**
+ * Indicates whether a smiling probability is available.
+ */
+@property(atomic, assign, readonly) BOOL hasSmilingProbability;
+
+/**
+ * A value between 0.0 and 1.0 giving a probability that the face is smiling.
+ */
+@property(atomic, assign, readonly) CGFloat smilingProbability;
+
+/**
+ * Indicates whether a left eye open probability is available.
+ */
+@property(atomic, assign, readonly) BOOL hasLeftEyeOpenProbability;
+
+/**
+ * A value between 0.0 and 1.0 giving a probability that the face's left eye is open.
+ */
+@property(atomic, assign, readonly) CGFloat leftEyeOpenProbability;
+
+/**
+ * Indicates whether a right eye open probability is available.
+ */
+@property(atomic, assign, readonly) BOOL hasRightEyeOpenProbability;
+
+/**
+ * A value between 0.0 and 1.0 giving a probability that the face's right eye is open.
+ */
+@property(atomic, assign, readonly) CGFloat rightEyeOpenProbability;
+
+/**
+ * Describes a set of points that outlines a facial landmark.
+ */
+@property(atomic, copy, readonly) GMVFaceContour *contour;
+
+@end
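+
+// Usage sketch (illustrative): every optional measurement above is paired with a has* flag;
+// check the flag before reading the value, since an unset CGFloat or CGPoint is
+// indistinguishable from a real measurement.
+//
+//   if (face.hasSmilingProbability && face.smilingProbability > 0.8) {
+//     NSLog(@"Smiling face at %@", NSStringFromCGRect(face.bounds));
+//   }
+//   if (face.hasLeftEyePosition) {
+//     CGPoint leftEye = face.leftEyePosition;
+//     // ... position an overlay at leftEye ...
+//   }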
+
+/**
+ * Describes a label detected in a still image frame. Its properties provide details about the
+ * label.
+ */
+@interface GMVLabelFeature : GMVFeature
+
+/**
+ * Machine-generated identifier (thus MID) corresponding to the entity's Google Knowledge Graph
+ * entry. For example: "/m/01j51".
+ *
+ * Note the globally unique MID values remain unchanged across different languages, so you
+ * can use this value to tie entities together from different languages. To inspect the MID
+ * value, refer to the Google Knowledge Graph API documentation.
+ * https://developers.google.com/knowledge-graph/reference/rest/v1/
+ */
+@property(atomic, copy, readonly) NSString *MID;
+
+/**
+ * Description of the label, i.e. a human-readable string in American English. For example:
+ * "Balloon".
+ *
+ * Note: this is not fit for display purposes, as it is not localized. Use the MID and query the
+ * Knowledge Graph to get a localized description of the label.
+ */
+@property(atomic, copy, readonly) NSString *labelDescription;
+
+/**
+ * Confidence score for the label (between 0 and 1).
+ *
+ * Features coming from a label detector all have scores greater than or equal to the detector's
+ * configured threshold.
+ */
+@property(atomic, assign, readonly) float score;
+
+@end

+ 58 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GMVUtility.h

@@ -0,0 +1,58 @@
+#import <AVFoundation/AVFoundation.h>
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+#import "GMVDetectorConstants.h"
+
+@interface GMVUtility : NSObject
+
+/**
+ * Converts CMSampleBuffer to UIImage. This function supports 420v, 420f, and BGRA
+ * CVPixelBufferPixelFormatTypes.
+ *
+ * @param sampleBuffer The buffer to convert to UIImage.
+ * @returns UIImage in RGBA8888 format.
+ */
++ (UIImage *)sampleBufferTo32RGBA:(CMSampleBufferRef)sampleBuffer;
+
+/**
+ * Converts CMSampleBuffer to RGBA8888 data format. This function supports 420v, 420f, and BGRA
+ * CVPixelBufferPixelFormatTypes. The output data corresponds to an image with exactly the same
+ * width and height as the input |sampleBuffer|. The number of bytes in a pixel row corresponds
+ * to the returned NSData's length divided by the height.
+ *
+ * @param sampleBuffer The buffer to convert to NSData.
+ * @returns NSData in RGBA8888 format.
+ */
++ (NSData *)anySampleBufferFormatTo32RGBA:(CMSampleBufferRef)sampleBuffer;
+
+/**
+ * This function determines the image exif metadata using device orientation and device
+ * position. The orientation is significant when using detections on an image generated
+ * from AVCaptureVideoDataOutput CMSampleBuffer. AVCaptureVideoDataOutput does not support
+ * setting the video orientation, therefore the client has to handle the rotation on their own.
+ * GMVImageOrientation can be passed in to GMVDetector featuresInImage:options: to let the
+ * detector handle the video rotation for you.
+ *
+ * @param deviceOrientation The device orientation.
+ * @param position The capture device position.
+ * @param defaultOrientation The default device orientation to use when |deviceOrientation| is
+ *        UIDeviceOrientationFaceUp or UIDeviceOrientationFaceDown.
+ * @returns GMVImageOrientation value to express an image exif metadata.
+ */
++ (GMVImageOrientation)imageOrientationFromOrientation:(UIDeviceOrientation)deviceOrientation
+                             withCaptureDevicePosition:(AVCaptureDevicePosition)position
+                              defaultDeviceOrientation:(UIDeviceOrientation)defaultOrientation;
+
+/**
+ * Converts an RGBA buffer to a UIImage. The number of bytes in a pixel row is inferred from
+ * |data|'s length divided by |height|.
+ *
+ * @param data The RGBA buffer.
+ * @param width The width of the buffer.
+ * @param height The height of the buffer.
+ * @returns A UIImage instance constructed from |data|.
+ */
++ (UIImage *)imageFromData:(NSData *)data width:(size_t)width height:(size_t)height;
+
+@end
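+
+// Usage sketch (illustrative): inside an AVCaptureVideoDataOutput delegate, the helpers above
+// convert the frame and compute the orientation hint that GMVDetector featuresInImage:options:
+// expects. |self.faceDetector| and |self.lastKnownOrientation| are hypothetical properties of
+// the delegate, not part of this framework.
+//
+//   - (void)captureOutput:(AVCaptureOutput *)output
+//       didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+//              fromConnection:(AVCaptureConnection *)connection {
+//     UIImage *image = [GMVUtility sampleBufferTo32RGBA:sampleBuffer];
+//     GMVImageOrientation orientation = [GMVUtility
+//         imageOrientationFromOrientation:[[UIDevice currentDevice] orientation]
+//               withCaptureDevicePosition:AVCaptureDevicePositionBack
+//                defaultDeviceOrientation:self.lastKnownOrientation];
+//     NSDictionary *options = @{GMVDetectorImageOrientation : @(orientation)};
+//     NSArray *faces = [self.faceDetector featuresInImage:image
+//                                                 options:options];
+//   }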

+ 4 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Headers/GoogleMobileVision.h

@@ -0,0 +1,4 @@
+#import "GMVDetector.h"
+#import "GMVDetectorConstants.h"
+#import "GMVFeature.h"
+#import "GMVUtility.h"

+ 16 - 0
Libraries external/Firebase/MLVision/GoogleMobileVision.framework/Modules/module.modulemap

@@ -0,0 +1,16 @@
+framework module GoogleMobileVision {
+  umbrella header "GoogleMobileVision.h"
+  export *
+  module * { export * }
+  link "sqlite3"
+  link "z"
+  link framework "AVFoundation"
+  link framework "AddressBook"
+  link framework "CoreGraphics"
+  link framework "CoreMedia"
+  link framework "CoreVideo"
+  link framework "Foundation"
+  link framework "LocalAuthentication"
+  link framework "QuartzCore"
+  link framework "Security"
+  link framework "UIKit"}

BIN
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/GoogleToolboxForMac


+ 100 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDebugSelectorValidation.h

@@ -0,0 +1,100 @@
+//
+//  GTMDebugSelectorValidation.h
+//
+//  This file should only be included within an implementation file.  In any
+//  function that takes an object and selector to invoke, you should call:
+//
+//    GTMAssertSelectorNilOrImplementedWithArguments(obj, sel, @encode(arg1type), ..., NULL)
+//  or
+//    GTMAssertSelectorNilOrImplementedWithReturnTypeAndArguments(obj, sel, @encode(returnType), @encode(arg1type), ..., NULL)
+//
+//  This will then validate that the selector is defined and using the right
+//  type(s); this can help catch errors much earlier than waiting for the
+//  selector to actually fire (and in the case of error selectors, might never
+//  really be tested until in the field).
+//
+//  Copyright 2007-2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#if DEBUG
+
+#import <stdarg.h>
+#import "GTMDefines.h"
+
+static void GTMAssertSelectorNilOrImplementedWithReturnTypeAndArguments(id obj, SEL sel, const char *retType, ...) {
+
+  // verify that the object's selector is implemented with the proper
+  // number and type of arguments
+  va_list argList;
+  va_start(argList, retType);
+
+  if (obj && sel) {
+    // check that the selector is implemented
+    _GTMDevAssert([obj respondsToSelector:sel],
+                  @"\"%@\" selector \"%@\" is unimplemented or misnamed",
+                  NSStringFromClass([obj class]),
+                  NSStringFromSelector(sel));
+
+    const char *expectedArgType;
+    NSUInteger argCount = 2; // skip self and _cmd
+    NSMethodSignature *sig = [obj methodSignatureForSelector:sel];
+
+    // check that each expected argument is present and of the correct type
+    while ((expectedArgType = va_arg(argList, const char*)) != 0) {
+
+      if ([sig numberOfArguments] > argCount) {
+        const char *foundArgType = [sig getArgumentTypeAtIndex:argCount];
+
+        _GTMDevAssert(0 == strncmp(foundArgType, expectedArgType, strlen(expectedArgType)),
+                      @"\"%@\" selector \"%@\" argument %u should be type %s",
+                      NSStringFromClass([obj class]),
+                      NSStringFromSelector(sel),
+                      (uint32_t)(argCount - 2),
+                      expectedArgType);
+      }
+      argCount++;
+    }
+
+    // check that the proper number of arguments are present in the selector
+    _GTMDevAssert(argCount == [sig numberOfArguments],
+                  @"\"%@\" selector \"%@\" should have %u arguments",
+                  NSStringFromClass([obj class]),
+                  NSStringFromSelector(sel),
+                  (uint32_t)(argCount - 2));
+
+    // if asked, validate the return type
+    if (retType && (strcmp("gtm_skip_return_test", retType) != 0)) {
+      const char *foundRetType = [sig methodReturnType];
+      _GTMDevAssert(0 == strncmp(foundRetType, retType, strlen(retType)),
+                    @"\"%@\" selector \"%@\" return type should be type %s",
+                    NSStringFromClass([obj class]),
+                    NSStringFromSelector(sel),
+                    retType);
+    }
+  }
+
+  va_end(argList);
+}
+
+#define GTMAssertSelectorNilOrImplementedWithArguments(obj, sel, ...) \
+  GTMAssertSelectorNilOrImplementedWithReturnTypeAndArguments((obj), (sel), "gtm_skip_return_test", __VA_ARGS__)
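+
+// Usage sketch (illustrative): validating a hypothetical delegate callback that takes an id and
+// an NSData * and returns void. The argument-type list is NULL-terminated, as described in the
+// header comment above.
+//
+//   GTMAssertSelectorNilOrImplementedWithArguments(
+//       delegate, @selector(fetcher:didFinishWithData:),
+//       @encode(id), @encode(NSData *), NULL);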
+
+#else // DEBUG
+
+// make it go away if not debug
+#define GTMAssertSelectorNilOrImplementedWithReturnTypeAndArguments(obj, sel, retType, ...) do { } while (0)
+#define GTMAssertSelectorNilOrImplementedWithArguments(obj, sel, ...) do { } while (0)
+
+#endif // DEBUG

+ 44 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDebugThreadValidation.h

@@ -0,0 +1,44 @@
+//
+//  GTMDebugThreadValidation.h
+//
+//  Copyright 2016 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import "GTMDefines.h"
+#import <Foundation/Foundation.h>
+
+// GTMCheckCurrentQueue, GTMIsCurrentQueue
+//
+// GTMCheckCurrentQueue takes a target queue and uses _GTMDevAssert to
+// report if that is not the currently executing queue.
+//
+// GTMIsCurrentQueue takes a target queue and returns true if the target queue
+// is the currently executing dispatch queue. This can be passed to another
+// assertion call in debug builds; it should never be used in release code.
+//
+// The dispatch queue must have a label.
+#define GTMCheckCurrentQueue(targetQueue)                    \
+  _GTMDevAssert(GTMIsCurrentQueue(targetQueue),              \
+                @"Current queue is %s (expected %s)",        \
+                _GTMQueueName(DISPATCH_CURRENT_QUEUE_LABEL), \
+                _GTMQueueName(targetQueue))
+
+#define GTMIsCurrentQueue(targetQueue)                 \
+  (strcmp(_GTMQueueName(DISPATCH_CURRENT_QUEUE_LABEL), \
+          _GTMQueueName(targetQueue)) == 0)
+
+#define _GTMQueueName(queue)                     \
+  (strlen(dispatch_queue_get_label(queue)) > 0 ? \
+    dispatch_queue_get_label(queue) : "unnamed")
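+
+// Usage sketch (illustrative): asserting that a worker method only runs on its owning serial
+// queue. |_workQueue| is a hypothetical ivar created with dispatch_queue_create, so it carries
+// the label these macros require.
+//
+//   - (void)processPendingItems {
+//     GTMCheckCurrentQueue(_workQueue);
+//     // ... touch state owned by _workQueue ...
+//   }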

+ 375 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMDefines.h

@@ -0,0 +1,375 @@
+//
+// GTMDefines.h
+//
+//  Copyright 2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+// ============================================================================
+
+#include <AvailabilityMacros.h>
+#include <TargetConditionals.h>
+
+#ifdef __OBJC__
+#include <Foundation/NSObjCRuntime.h>
+#endif  // __OBJC__
+
+#if TARGET_OS_IPHONE
+#include <Availability.h>
+#endif  // TARGET_OS_IPHONE
+
+// ----------------------------------------------------------------------------
+// CPP symbols that can be overridden in a prefix to control how the toolbox
+// is compiled.
+// ----------------------------------------------------------------------------
+
+
+// By setting the GTM_CONTAINERS_VALIDATION_FAILED_LOG and
+// GTM_CONTAINERS_VALIDATION_FAILED_ASSERT macros you can control what happens
+// when a validation fails. If you implement your own validators, you may want
+// to control their internals using the same macros for consistency.
+#ifndef GTM_CONTAINERS_VALIDATION_FAILED_ASSERT
+  #define GTM_CONTAINERS_VALIDATION_FAILED_ASSERT 0
+#endif
+
+// Ensure __has_feature and __has_extension are safe to use.
+// See http://clang-analyzer.llvm.org/annotations.html
+#ifndef __has_feature      // Optional.
+  #define __has_feature(x) 0 // Compatibility with non-clang compilers.
+#endif
+
+#ifndef __has_extension
+  #define __has_extension __has_feature // Compatibility with pre-3.0 compilers.
+#endif
+
+// Give ourselves a consistent way to do inlines.  Apple's macros even use
+// a few different actual definitions, so we're based off of the foundation
+// one.
+#if !defined(GTM_INLINE)
+  #if (defined (__GNUC__) && (__GNUC__ == 4)) || defined (__clang__)
+    #define GTM_INLINE static __inline__ __attribute__((always_inline))
+  #else
+    #define GTM_INLINE static __inline__
+  #endif
+#endif
+
+// Give ourselves a consistent way of doing externs that links up nicely
+// when mixing objc and objc++
+#if !defined (GTM_EXTERN)
+  #if defined __cplusplus
+    #define GTM_EXTERN extern "C"
+    #define GTM_EXTERN_C_BEGIN extern "C" {
+    #define GTM_EXTERN_C_END }
+  #else
+    #define GTM_EXTERN extern
+    #define GTM_EXTERN_C_BEGIN
+    #define GTM_EXTERN_C_END
+  #endif
+#endif
+
+// Give ourselves a consistent way of exporting things if we have visibility
+// set to hidden.
+#if !defined (GTM_EXPORT)
+  #define GTM_EXPORT __attribute__((visibility("default")))
+#endif
+
+// Give ourselves a consistent way of declaring something as unused. This
+// doesn't use __unused because that is only supported in gcc 4.2 and greater.
+#if !defined (GTM_UNUSED)
+#define GTM_UNUSED(x) ((void)(x))
+#endif
+
+// _GTMDevLog & _GTMDevAssert
+//
+// _GTMDevLog & _GTMDevAssert are meant to be a very lightweight shell for
+// developer level errors.  This implementation simply macros to NSLog/NSAssert.
+// It is not intended to be a general logging/reporting system.
+//
+// Please see http://code.google.com/p/google-toolbox-for-mac/wiki/DevLogNAssert
+// for a little more background on the usage of these macros.
+//
+//    _GTMDevLog           log some error/problem in debug builds
+//    _GTMDevAssert        assert if condition isn't met w/in a method/function
+//                           in all builds.
+//
+// To replace this system, just provide different macro definitions in your
+// prefix header.  Remember, any implementation you provide *must* be thread
+// safe since this could be called by anything in whatever situation it has
+// been placed in.
+//
+
+// Ignore the "Macro name is a reserved identifier" warning in this section
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wreserved-id-macro"
+
+// We only define the simple macros if nothing else has defined this.
+#ifndef _GTMDevLog
+
+#ifdef DEBUG
+  #define _GTMDevLog(...) NSLog(__VA_ARGS__)
+#else
+  #define _GTMDevLog(...) do { } while (0)
+#endif
+
+#endif // _GTMDevLog
+
+#ifndef _GTMDevAssert
+// we directly invoke the NSAssert handler so we can pass on the varargs
+// (NSAssert doesn't have a macro we can use that takes varargs)
+#if !defined(NS_BLOCK_ASSERTIONS)
+  #define _GTMDevAssert(condition, ...)                                       \
+    do {                                                                      \
+      if (!(condition)) {                                                     \
+        [[NSAssertionHandler currentHandler]                                  \
+            handleFailureInFunction:(NSString *)                              \
+                                        [NSString stringWithUTF8String:__PRETTY_FUNCTION__] \
+                               file:(NSString *)[NSString stringWithUTF8String:__FILE__]  \
+                         lineNumber:__LINE__                                  \
+                        description:__VA_ARGS__];                             \
+      }                                                                       \
+    } while(0)
+#else // !defined(NS_BLOCK_ASSERTIONS)
+  #define _GTMDevAssert(condition, ...) do { } while (0)
+#endif // !defined(NS_BLOCK_ASSERTIONS)
+
+#endif // _GTMDevAssert
+
+// _GTMCompileAssert
+//
+// Note:  Software for current compilers should just use _Static_assert directly
+// instead of this macro.
+//
+// _GTMCompileAssert is an assert that is meant to fire at compile time if you
+// want to check things at compile instead of runtime. For example if you
+// want to check that a wchar is 4 bytes instead of 2 you would use
+// _GTMCompileAssert(sizeof(wchar_t) == 4, wchar_t_is_4_bytes_on_OS_X)
+// Note that the second "arg" is not in quotes, and must be a valid processor
+// symbol in it's own right (no spaces, punctuation etc).
+
+// Wrapping this in an #ifndef allows external groups to define their own
+// compile time assert scheme.
+#ifndef _GTMCompileAssert
+  #if __has_feature(c_static_assert) || __has_extension(c_static_assert)
+    #define _GTMCompileAssert(test, msg) _Static_assert((test), #msg)
+  #else
+    // Pre-Xcode 7 support.
+    //
+    // We got this technique from here:
+    // http://unixjunkie.blogspot.com/2007/10/better-compile-time-asserts_29.html
+    #define _GTMCompileAssertSymbolInner(line, msg) _GTMCOMPILEASSERT ## line ## __ ## msg
+    #define _GTMCompileAssertSymbol(line, msg) _GTMCompileAssertSymbolInner(line, msg)
+    #define _GTMCompileAssert(test, msg) \
+      typedef char _GTMCompileAssertSymbol(__LINE__, msg) [ ((test) ? 1 : -1) ]
+  #endif  // __has_feature(c_static_assert) || __has_extension(c_static_assert)
+#endif // _GTMCompileAssert
+
+#pragma clang diagnostic pop
+
+// ----------------------------------------------------------------------------
+// CPP symbols defined based on the project settings so the GTM code has
+// simple things to test against w/o scattering the knowledge of project
+// setting through all the code.
+// ----------------------------------------------------------------------------
+
+// Provide a single constant CPP symbol that all of GTM uses for ifdefing
+// iPhone code.
+#if TARGET_OS_IPHONE // iPhone SDK
+  // For iPhone specific stuff
+  #define GTM_IPHONE_SDK 1
+  #if TARGET_IPHONE_SIMULATOR
+    #define GTM_IPHONE_DEVICE 0
+    #define GTM_IPHONE_SIMULATOR 1
+  #else
+    #define GTM_IPHONE_DEVICE 1
+    #define GTM_IPHONE_SIMULATOR 0
+  #endif  // TARGET_IPHONE_SIMULATOR
+  // By default, GTM has provided its own unit-testing support; define this
+  // to use the support provided by Xcode, especially for the Xcode4 support
+  // for unittesting.
+  #ifndef GTM_USING_XCTEST
+    #define GTM_USING_XCTEST 0
+  #endif
+  #define GTM_MACOS_SDK 0
+#else
+  // For MacOS specific stuff
+  #define GTM_MACOS_SDK 1
+  #define GTM_IPHONE_SDK 0
+  #define GTM_IPHONE_SIMULATOR 0
+  #define GTM_IPHONE_DEVICE 0
+  #ifndef GTM_USING_XCTEST
+    #define GTM_USING_XCTEST 0
+  #endif
+#endif
+
+// Some of our own availability macros
+#if GTM_MACOS_SDK
+#define GTM_AVAILABLE_ONLY_ON_IPHONE UNAVAILABLE_ATTRIBUTE
+#define GTM_AVAILABLE_ONLY_ON_MACOS
+#else
+#define GTM_AVAILABLE_ONLY_ON_IPHONE
+#define GTM_AVAILABLE_ONLY_ON_MACOS UNAVAILABLE_ATTRIBUTE
+#endif
+
+// GC was dropped by Apple; define the old constant in case anyone still keys
+// off of it.
+#ifndef GTM_SUPPORT_GC
+  #define GTM_SUPPORT_GC 0
+#endif
+
+// Some support for advanced clang static analysis functionality
+#ifndef NS_RETURNS_RETAINED
+  #if __has_feature(attribute_ns_returns_retained)
+    #define NS_RETURNS_RETAINED __attribute__((ns_returns_retained))
+  #else
+    #define NS_RETURNS_RETAINED
+  #endif
+#endif
+
+#ifndef NS_RETURNS_NOT_RETAINED
+  #if __has_feature(attribute_ns_returns_not_retained)
+    #define NS_RETURNS_NOT_RETAINED __attribute__((ns_returns_not_retained))
+  #else
+    #define NS_RETURNS_NOT_RETAINED
+  #endif
+#endif
+
+#ifndef CF_RETURNS_RETAINED
+  #if __has_feature(attribute_cf_returns_retained)
+    #define CF_RETURNS_RETAINED __attribute__((cf_returns_retained))
+  #else
+    #define CF_RETURNS_RETAINED
+  #endif
+#endif
+
+#ifndef CF_RETURNS_NOT_RETAINED
+  #if __has_feature(attribute_cf_returns_not_retained)
+    #define CF_RETURNS_NOT_RETAINED __attribute__((cf_returns_not_retained))
+  #else
+    #define CF_RETURNS_NOT_RETAINED
+  #endif
+#endif
+
+#ifndef NS_CONSUMED
+  #if __has_feature(attribute_ns_consumed)
+    #define NS_CONSUMED __attribute__((ns_consumed))
+  #else
+    #define NS_CONSUMED
+  #endif
+#endif
+
+#ifndef CF_CONSUMED
+  #if __has_feature(attribute_cf_consumed)
+    #define CF_CONSUMED __attribute__((cf_consumed))
+  #else
+    #define CF_CONSUMED
+  #endif
+#endif
+
+#ifndef NS_CONSUMES_SELF
+  #if __has_feature(attribute_ns_consumes_self)
+    #define NS_CONSUMES_SELF __attribute__((ns_consumes_self))
+  #else
+    #define NS_CONSUMES_SELF
+  #endif
+#endif
+
+#ifndef GTM_NONNULL
+  #if defined(__has_attribute)
+    #if __has_attribute(nonnull)
+      #define GTM_NONNULL(x) __attribute__((nonnull x))
+    #else
+      #define GTM_NONNULL(x)
+    #endif
+  #else
+    #define GTM_NONNULL(x)
+  #endif
+#endif
+
+// Invalidates the initializer from which it's called.
+#ifndef GTMInvalidateInitializer
+  #if __has_feature(objc_arc)
+    #define GTMInvalidateInitializer() \
+      do { \
+        [self class]; /* Avoid warning of dead store to |self|. */ \
+        _GTMDevAssert(NO, @"Invalid initializer."); \
+        return nil; \
+      } while (0)
+  #else
+    #define GTMInvalidateInitializer() \
+      do { \
+        [self release]; \
+        _GTMDevAssert(NO, @"Invalid initializer."); \
+        return nil; \
+      } while (0)
+  #endif
+#endif
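+
+// Usage sketch (illustrative): blocking the plain -init of a class whose designated
+// initializer takes arguments.
+//
+//   - (instancetype)init {
+//     GTMInvalidateInitializer();
+//   }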
+
+#ifndef GTMCFAutorelease
+  // GTMCFAutorelease returns an id.  In contrast, Apple's CFAutorelease returns
+  // a CFTypeRef.
+  #if __has_feature(objc_arc)
+    #define GTMCFAutorelease(x) CFBridgingRelease(x)
+  #else
+    #define GTMCFAutorelease(x) ([(id)x autorelease])
+  #endif
+#endif
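+
+// Usage sketch (illustrative): handing a +1 CF object to Cocoa memory management in one step,
+// under both ARC and manual reference counting.
+//
+//   CFUUIDRef uuid = CFUUIDCreate(NULL);
+//   NSString *uuidString = GTMCFAutorelease(CFUUIDCreateString(NULL, uuid));
+//   CFRelease(uuid);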
+
+#ifdef __OBJC__
+
+
+// Macro to allow you to create NSStrings out of other macros.
+// #define FOO foo
+// NSString *fooString = GTM_NSSTRINGIFY(FOO);
+#if !defined (GTM_NSSTRINGIFY)
+  #define GTM_NSSTRINGIFY_INNER(x) @#x
+  #define GTM_NSSTRINGIFY(x) GTM_NSSTRINGIFY_INNER(x)
+#endif
+
+// ============================================================================
+
+// GTM_SEL_STRING is for specifying selector (usually property) names to KVC
+// or KVO methods.
+// In debug it will generate warnings for undeclared selectors if
+// -Wundeclared-selector is turned on.
+// In release it will have no runtime overhead.
+#ifndef GTM_SEL_STRING
+  #ifdef DEBUG
+    #define GTM_SEL_STRING(selName) NSStringFromSelector(@selector(selName))
+  #else
+    #define GTM_SEL_STRING(selName) @#selName
+  #endif  // DEBUG
+#endif  // GTM_SEL_STRING
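+
+// Usage sketch (illustrative): using GTM_SEL_STRING for a KVO key path, so a typo in the
+// property name is flagged at compile time in debug builds via -Wundeclared-selector.
+// |account| and its displayName property are hypothetical.
+//
+//   [account addObserver:self
+//             forKeyPath:GTM_SEL_STRING(displayName)
+//                options:NSKeyValueObservingOptionNew
+//                context:NULL];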
+
+#ifndef GTM_WEAK
+  #if __has_feature(objc_arc_weak)
+    // With ARC enabled, __weak means a reference that isn't implicitly
+    // retained.  __weak objects are accessed through runtime functions, so
+    // they are zeroed out, but this requires OS X 10.7+.
+    // At clang r251041+, ARC-style zeroing weak references even work in
+    // non-ARC mode.
+    #define GTM_WEAK __weak
+  #elif __has_feature(objc_arc)
+    // ARC, but targeting 10.6 or older, where zeroing weak references don't
+    // exist.
+    #define GTM_WEAK __unsafe_unretained
+  #else
+    // With manual reference counting, __weak used to be silently ignored.
+    // clang r251041 gives it the ARC semantics instead.  This means they
+    // now require a deployment target of 10.7, while some clients of GTM
+    // still target 10.6.  In these cases, expand GTM_WEAK to nothing instead.
+    #define GTM_WEAK
+  #endif
+#endif
+
+#endif  // __OBJC__

+ 79 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMLocalizedString.h

@@ -0,0 +1,79 @@
+//
+//  GTMLocalizedString.h
+//
+//  Copyright (c) 2010 Google Inc. All rights reserved.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+// The NSLocalizedString macros do not have NS_FORMAT_ARGUMENT modifiers put
+// on them, which means you get warnings on Snow Leopard when
+// GCC_WARN_TYPECHECK_CALLS_TO_PRINTF = YES and you do things like:
+// NSString *foo
+//   = [NSString stringWithFormat:NSLocalizedString(@"blah %@", nil), @"bar"];
+// The GTMLocalizedString functions fix that for you so you can do:
+// NSString *foo
+//   = [NSString stringWithFormat:GTMLocalizedString(@"blah %@", nil), @"bar"];
+// and you will compile cleanly.
+// If you use genstrings you can call it with
+// genstrings -s GTMLocalizedString ...
+// and it should work as expected.
+// You can override how GTM gets its localized strings (if you are using
+// something other than NSLocalizedString) by redefining
+// GTMLocalizedStringWithDefaultValueInternal.
+
+#ifndef GTMLocalizedStringWithDefaultValueInternal
+  #define GTMLocalizedStringWithDefaultValueInternal \
+      NSLocalizedStringWithDefaultValue
+#endif
+
+GTM_INLINE NS_FORMAT_ARGUMENT(1) NSString *GTMLocalizedString(
+    NSString *key,  NSString *comment) {
+  return GTMLocalizedStringWithDefaultValueInternal(key,
+                                                    nil,
+                                                    [NSBundle mainBundle],
+                                                    @"",
+                                                    comment);
+}
+
+GTM_INLINE NS_FORMAT_ARGUMENT(1) NSString *GTMLocalizedStringFromTable(
+    NSString *key, NSString *tableName, NSString *comment) {
+  return GTMLocalizedStringWithDefaultValueInternal(key,
+                                                    tableName,
+                                                    [NSBundle mainBundle],
+                                                    @"",
+                                                    comment);
+}
+
+GTM_INLINE NS_FORMAT_ARGUMENT(1) NSString *GTMLocalizedStringFromTableInBundle(
+    NSString *key,  NSString *tableName, NSBundle *bundle, NSString *comment) {
+  return GTMLocalizedStringWithDefaultValueInternal(key,
+                                                    tableName,
+                                                    bundle,
+                                                    @"",
+                                                    comment);
+}
+
+GTM_INLINE NS_FORMAT_ARGUMENT(1) NSString *GTMLocalizedStringWithDefaultValue(
+    NSString *key, NSString *tableName, NSBundle *bundle, NSString *value,
+    NSString *comment) {
+  return GTMLocalizedStringWithDefaultValueInternal(key,
+                                                    tableName,
+                                                    bundle,
+                                                    value,
+                                                    comment);
+}
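+
+// Example (illustrative; the table and key names are hypothetical): looking up
+// a string from a specific table with a fallback value.
+//
+//   NSString *title = GTMLocalizedStringWithDefaultValue(
+//       @"WindowTitle", @"MainUI", [NSBundle mainBundle], @"Untitled",
+//       @"Title shown for unsaved documents");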
+

+ 508 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMLogger.h

@@ -0,0 +1,508 @@
+//
+//  GTMLogger.h
+//
+//  Copyright 2007-2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+// Key Abstractions
+// ----------------
+//
+// This file declares multiple classes and protocols that are used by the
+// GTMLogger logging system. The 4 main abstractions used in this file are the
+// following:
+//
+//   * logger (GTMLogger) - The main logging class that users interact with. It
+//   has methods for logging at different levels and uses a log writer, a log
+//   formatter, and a log filter to get the job done.
+//
+//   * log writer (GTMLogWriter) - Writes a given string to some log file, where
+//   a "log file" can be a physical file on disk, a POST over HTTP to some URL,
+//   or even some in-memory structure (e.g., a ring buffer).
+//
+//   * log formatter (GTMLogFormatter) - Given a format string and arguments as
+//   a va_list, returns a single formatted NSString. A "formatted string" could
+//   be a string with the date prepended, a string with values in a CSV format,
+//   or even a string of XML.
+//
+//   * log filter (GTMLogFilter) - Given a formatted log message as an NSString
+//   and the level at which the message is to be logged, this class will decide
+//   whether the given message should be logged or not. This is a flexible way
+//   to filter out messages logged at a certain level, messages that contain
+//   certain text, or filter nothing out at all. This gives the caller the
+//   flexibility to dynamically enable debug logging in Release builds.
+//
+// This file also declares some classes to handle the common log writer, log
+// formatter, and log filter cases. Callers can also create their own writers,
+// formatters, and filters and they can even build them on top of the ones
+// declared here. Keep in mind that your custom writer/formatter/filter may be
+// called from multiple threads, so it must be thread-safe.
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+// Predeclaration of used protocols that are declared later in this file.
+@protocol GTMLogWriter, GTMLogFormatter, GTMLogFilter;
+
+// GTMLogger
+//
+// GTMLogger is the primary user-facing class for an object-oriented logging
+// system. It is built on the concept of log formatters (GTMLogFormatter), log
+// writers (GTMLogWriter), and log filters (GTMLogFilter). When a GTMLogger is
+// asked to log a message, the message is formatted using the log
+// formatter, then the log filter is consulted to see if the message should be
+// logged, and if so, the message is sent to the log writer to be written out.
+//
+// GTMLogger is intended to be a flexible and thread-safe logging solution. Its
+// flexibility comes from the fact that GTMLogger instances can be customized
+// with user defined formatters, filters, and writers. And these writers,
+// filters, and formatters can be combined, stacked, and customized in arbitrary
+// ways to suit the needs at hand. For example, multiple writers can be used at
+// the same time, and a GTMLogger instance can even be used as another
+// GTMLogger's writer. This allows for arbitrarily deep logging trees.
+//
+// A standard GTMLogger uses a writer that sends messages to standard out, a
+// formatter that smacks a timestamp and a few other bits of interesting
+// information on the message, and a filter that filters out debug messages from
+// release builds. Using the standard log settings, a log message will look like
+// the following:
+//
+//   2007-12-30 10:29:24.177 myapp[4588/0xa07d0f60] [lvl=1] foo=<Foo: 0x123>
+//
+// The output contains the date and time of the log message, the name of the
+// process followed by its process ID/thread ID, the log level at which the
+// message was logged (in the previous example the level was 1:
+// kGTMLoggerLevelDebug), and finally, the user-specified log message itself (in
+// this case, the log message was @"foo=%@", foo).
+//
+// Multiple instances of GTMLogger can be created, each configured in its own
+// way.  Though GTMLogger is not a singleton (in the GoF sense), it does provide
+// access to a shared (i.e., globally accessible) GTMLogger instance. This makes
+// it convenient for all code in a process to use the same GTMLogger instance.
+// The shared GTMLogger instance can also be configured arbitrarily, and these
+// configuration changes will affect all code that logs through the shared
+// instance.
+
+//
+// Log Levels
+// ----------
+// GTMLogger has 3 different log levels: Debug, Info, and Error. GTMLogger
+// doesn't take any special action based on the log level; it simply forwards
+// this information on to formatters, filters, and writers, each of which may
+// optionally take action based on the level. Since log level filtering is
+// performed at runtime, log messages are typically not filtered out at compile
+// time.  The exception to this rule is that calls to the GTMLoggerDebug() macro
+// *ARE* filtered out of non-DEBUG builds. This is to be backwards compatible
+// with behavior that many developers are currently used to. Note that this
+// means that GTMLoggerDebug(@"hi") will be compiled out of Release builds, but
+// [[GTMLogger sharedLogger] logDebug:@"hi"] will NOT be compiled out.
+//
+// Standard loggers are created with the GTMLogLevelFilter log filter, which
+// filters out certain log messages based on log level, and some other settings.
+//
+// In addition to the -logDebug:, -logInfo:, and -logError: methods defined on
+// GTMLogger itself, there are also C macros that make usage of the shared
+// GTMLogger instance very convenient. These macros are:
+//
+//   GTMLoggerDebug(...)
+//   GTMLoggerInfo(...)
+//   GTMLoggerError(...)
+//
+// Again, a notable feature of these macros is that GTMLoggerDebug() calls
+// *will be compiled out of non-DEBUG builds*.
+//
+// Standard Loggers
+// ----------------
+// GTMLogger has the concept of "standard loggers". A standard logger is simply
+// a logger that is pre-configured with some standard/common writer, formatter,
+// and filter combination. Standard loggers are created using the creation
+// methods beginning with "standard". The alternative to a standard logger is a
+// regular logger, which will send messages to stdout, with no special
+// formatting, and no filtering.
+//
+// How do I use GTMLogger?
+// ----------------------
+// The typical way you will want to use GTMLogger is to simply use the
+// GTMLogger*() macros for logging from code. That way we can easily make
+// changes to the GTMLogger class and simply update the macros accordingly. Only
+// your application startup code (perhaps, somewhere in main()) should use the
+// GTMLogger class directly in order to configure the shared logger, which all
+// of the code using the macros will be using. Again, this is just the typical
+// situation.
+//
+// To be complete, there are cases where you may want to use GTMLogger directly,
+// or even create separate GTMLogger instances for some reason. That's fine,
+// too.
+//
+// Examples
+// --------
+// The following show some common GTMLogger use cases.
+//
+// 1. You want to log something as simply as possible. Also, this call will only
+//    appear in debug builds. In non-DEBUG builds it will be completely removed.
+//
+//      GTMLoggerDebug(@"foo = %@", foo);
+//
+// 2. The previous example is similar to the following. The major difference is
+//    that the previous call (example 1) will be compiled out of Release builds
+//    but this statement will not be compiled out.
+//
+//      [[GTMLogger sharedLogger] logDebug:@"foo = %@", foo];
+//
+// 3. Send all logging output from the shared logger to a file. We do this by
+//    creating an NSFileHandle for writing associated with a file, and setting
+//    that file handle as the logger's writer.
+//
+//      NSFileHandle *f = [NSFileHandle fileHandleForWritingAtPath:@"/tmp/f.log"
+//                                                          create:YES];
+//      [[GTMLogger sharedLogger] setWriter:f];
+//      GTMLoggerError(@"hi");  // This will be sent to /tmp/f.log
+//
+// 4. Create a new GTMLogger that will log to a file. This example differs from
+//    the previous one because here we create a new GTMLogger that is different
+//    from the shared logger.
+//
+//      GTMLogger *logger = [GTMLogger standardLoggerWithPath:@"/tmp/temp.log"];
+//      [logger logInfo:@"hi temp log file"];
+//
+// 5. Create a logger that writes to stdout and does NOT do any formatting to
+//    the log message. This might be useful, for example, when writing a help
+//    screen for a command-line tool to standard output.
+//
+//      GTMLogger *logger = [GTMLogger logger];
+//      [logger logInfo:@"%@ version 0.1 usage", progName];
+//
+// 6. Send log output to stdout AND to a log file. The trick here is that
+//    NSArrays function as composite log writers, which means when an array is
+//    set as the log writer, it forwards all logging messages to all of its
+//    contained GTMLogWriters.
+//
+//      // Create array of GTMLogWriters
+//      NSArray *writers = [NSArray arrayWithObjects:
+//          [NSFileHandle fileHandleForWritingAtPath:@"/tmp/f.log" create:YES],
+//          [NSFileHandle fileHandleWithStandardOutput], nil];
+//
+//      GTMLogger *logger = [GTMLogger standardLogger];
+//      [logger setWriter:writers];
+//      [logger logInfo:@"hi"];  // Output goes to stdout and /tmp/f.log
+//
+// For further details on log writers, formatters, and filters, see the
+// documentation below.
+//
+// NOTE: GTMLogger is application level logging.  By default it does nothing
+// with _GTMDevLog/_GTMDevAssert (see GTMDefines.h).  An application can choose
+// to bridge _GTMDevLog/_GTMDevAssert to GTMLogger by providing macro
+// definitions in its prefix header (see GTMDefines.h for how one would do
+// that).
+//
+@interface GTMLogger : NSObject {
+ @private
+  id<GTMLogWriter> writer_;
+  id<GTMLogFormatter> formatter_;
+  id<GTMLogFilter> filter_;
+}
+
+//
+// Accessors for the shared logger instance
+//
+
+// Returns a shared/global standard GTMLogger instance. Callers should typically
+// use this method to get a GTMLogger instance, unless they explicitly want
+// their own instance to configure for their own needs. This is the only method
+// that returns a shared instance; all the rest return new GTMLogger instances.
++ (id)sharedLogger;
+
+// Sets the shared logger instance to |logger|. Future calls to +sharedLogger
+// will return |logger| instead.
++ (void)setSharedLogger:(GTMLogger *)logger;
+
+//
+// Creation methods
+//
+
+// Returns a new autoreleased GTMLogger instance that will log to stdout, using
+// the GTMLogStandardFormatter, and the GTMLogLevelFilter filter.
++ (id)standardLogger;
+
+// Same as +standardLogger, but logs to stderr.
++ (id)standardLoggerWithStderr;
+
+// Same as +standardLogger but levels >= kGTMLoggerLevelError are routed to
+// stderr, everything else goes to stdout.
++ (id)standardLoggerWithStdoutAndStderr;
+
+// Returns a new standard GTMLogger instance with a log writer that will
+// write to the file at |path|, and will use the GTMLogStandardFormatter and
+// GTMLogLevelFilter classes. If |path| does not exist, it will be created.
++ (id)standardLoggerWithPath:(NSString *)path;
+
+// Returns an autoreleased GTMLogger instance that will use the specified
+// |writer|, |formatter|, and |filter|.
++ (id)loggerWithWriter:(id<GTMLogWriter>)writer
+             formatter:(id<GTMLogFormatter>)formatter
+                filter:(id<GTMLogFilter>)filter;
+
+// Returns an autoreleased GTMLogger instance that logs to stdout, with the
+// basic formatter, and no filter. The returned logger differs from the logger
+// returned by +standardLogger because this one does not do any filtering and
+// does not do any special log formatting; this is the difference between a
+// "regular" logger and a "standard" logger.
++ (id)logger;
+
+// Designated initializer. This method returns a GTMLogger initialized with the
+// specified |writer|, |formatter|, and |filter|. See the setter methods below
+// for what values will be used if nil is passed for a parameter.
+- (id)initWithWriter:(id<GTMLogWriter>)writer
+           formatter:(id<GTMLogFormatter>)formatter
+              filter:(id<GTMLogFilter>)filter;
+
+//
+// Logging  methods
+//
+
+// Logs a message at the debug level (kGTMLoggerLevelDebug).
+- (void)logDebug:(NSString *)fmt, ... NS_FORMAT_FUNCTION(1, 2);
+// Logs a message at the info level (kGTMLoggerLevelInfo).
+- (void)logInfo:(NSString *)fmt, ... NS_FORMAT_FUNCTION(1, 2);
+// Logs a message at the error level (kGTMLoggerLevelError).
+- (void)logError:(NSString *)fmt, ... NS_FORMAT_FUNCTION(1, 2);
+// Logs a message at the assert level (kGTMLoggerLevelAssert).
+- (void)logAssert:(NSString *)fmt, ... NS_FORMAT_FUNCTION(1, 2);
+
+
+//
+// Accessors
+//
+
+// Accessor methods for the log writer. If the log writer is set to nil,
+// [NSFileHandle fileHandleWithStandardOutput] is used.
+- (id<GTMLogWriter>)writer;
+- (void)setWriter:(id<GTMLogWriter>)writer;
+
+// Accessor methods for the log formatter. If the log formatter is set to nil,
+// GTMLogBasicFormatter is used. This formatter will format log messages in a
+// plain printf style.
+- (id<GTMLogFormatter>)formatter;
+- (void)setFormatter:(id<GTMLogFormatter>)formatter;
+
+// Accessor methods for the log filter. If the log filter is set to nil,
+// GTMLogNoFilter is used, which allows all log messages through.
+- (id<GTMLogFilter>)filter;
+- (void)setFilter:(id<GTMLogFilter>)filter;
+
+@end  // GTMLogger
+
+
+// Helper functions that are used by the convenience GTMLogger*() macros that
+// enable the logging of function names.
+@interface GTMLogger (GTMLoggerMacroHelpers)
+- (void)logFuncDebug:(const char *)func msg:(NSString *)fmt, ...
+  NS_FORMAT_FUNCTION(2, 3);
+- (void)logFuncInfo:(const char *)func msg:(NSString *)fmt, ...
+  NS_FORMAT_FUNCTION(2, 3);
+- (void)logFuncError:(const char *)func msg:(NSString *)fmt, ...
+  NS_FORMAT_FUNCTION(2, 3);
+- (void)logFuncAssert:(const char *)func msg:(NSString *)fmt, ...
+  NS_FORMAT_FUNCTION(2, 3);
+@end  // GTMLoggerMacroHelpers
+
+
+// The convenience macros are only defined if they haven't already been defined.
+#ifndef GTMLoggerInfo
+
+// Convenience macros that log to the shared GTMLogger instance. These macros
+// are how users should typically log to GTMLogger. Notice that GTMLoggerDebug()
+// calls will be compiled out of non-Debug builds.
+#define GTMLoggerDebug(...)  \
+  [[GTMLogger sharedLogger] logFuncDebug:__func__ msg:__VA_ARGS__]
+#define GTMLoggerInfo(...)   \
+  [[GTMLogger sharedLogger] logFuncInfo:__func__ msg:__VA_ARGS__]
+#define GTMLoggerError(...)  \
+  [[GTMLogger sharedLogger] logFuncError:__func__ msg:__VA_ARGS__]
+#define GTMLoggerAssert(...) \
+  [[GTMLogger sharedLogger] logFuncAssert:__func__ msg:__VA_ARGS__]
+
+// If we're not in a debug build, remove the GTMLoggerDebug statements. This
+// makes calls to GTMLoggerDebug "compile out" of Release builds
+#ifndef DEBUG
+#undef GTMLoggerDebug
+#define GTMLoggerDebug(...) do {} while(0)
+#endif
+
+#endif  // !defined(GTMLoggerInfo)
+
+// Log levels.
+typedef enum {
+  kGTMLoggerLevelUnknown,
+  kGTMLoggerLevelDebug,
+  kGTMLoggerLevelInfo,
+  kGTMLoggerLevelError,
+  kGTMLoggerLevelAssert,
+} GTMLoggerLevel;
+
+
+//
+//   Log Writers
+//
+
+// Protocol to be implemented by a GTMLogWriter instance.
+@protocol GTMLogWriter <NSObject>
+// Writes the given log message to where the log writer is configured to write.
+- (void)logMessage:(NSString *)msg level:(GTMLoggerLevel)level;
+@end  // GTMLogWriter
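+
+// Example (illustrative; MyNSLogWriter is hypothetical): a minimal custom
+// writer that forwards every message to NSLog.
+//
+//   @interface MyNSLogWriter : NSObject <GTMLogWriter>
+//   @end
+//
+//   @implementation MyNSLogWriter
+//   - (void)logMessage:(NSString *)msg level:(GTMLoggerLevel)level {
+//     NSLog(@"%@", msg);
+//   }
+//   @end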
+
+
+// Simple category on NSFileHandle that makes NSFileHandles valid log writers.
+// This is convenient because something like, say, +fileHandleWithStandardError
+// now becomes a valid log writer. Log messages are written to the file handle
+// with a newline appended.
+@interface NSFileHandle (GTMFileHandleLogWriter) <GTMLogWriter>
+// Opens the file at |path| in append mode, and creates the file with |mode|
+// if it didn't previously exist.
++ (id)fileHandleForLoggingAtPath:(NSString *)path mode:(mode_t)mode;
+@end  // NSFileHandle
+
+
+// This category makes NSArray a GTMLogWriter that can be composed of other
+// GTMLogWriters. This is the classic Composite GoF design pattern. When the
+// GTMLogWriter -logMessage:level: message is sent to the array, the array
+// forwards the message to all of its elements that implement the GTMLogWriter
+// protocol.
+//
+// This is useful in situations where you would like to send log output to
+// multiple log writers at the same time. Simply create an NSArray of the log
+// writers you wish to use, then set the array as the "writer" for your
+// GTMLogger instance.
+@interface NSArray (GTMArrayCompositeLogWriter) <GTMLogWriter>
+@end  // GTMArrayCompositeLogWriter
+
+
+// This category adapts the GTMLogger interface so that it can be used as a log
+// writer; it's an "adapter" in the GoF Adapter pattern sense.
+//
+// This is useful when you want to configure a logger to log to a specific
+// writer with a specific formatter and/or filter. But you want to also compose
+// that with a different log writer that may have its own formatter and/or
+// filter.
+@interface GTMLogger (GTMLoggerLogWriter) <GTMLogWriter>
+@end  // GTMLoggerLogWriter
+
+
+//
+//   Log Formatters
+//
+
+// Protocol to be implemented by a GTMLogFormatter instance.
+@protocol GTMLogFormatter <NSObject>
+// Returns a formatted string using the format specified in |fmt| and the va
+// args specified in |args|.
+- (NSString *)stringForFunc:(NSString *)func
+                 withFormat:(NSString *)fmt
+                     valist:(va_list)args
+                      level:(GTMLoggerLevel)level NS_FORMAT_FUNCTION(2, 0);
+@end  // GTMLogFormatter
+
+
+// A basic log formatter that formats a string the same way that NSLog (or
+// printf) would. It does not do anything fancy, nor does it add any data of its
+// own.
+@interface GTMLogBasicFormatter : NSObject <GTMLogFormatter>
+
+// Helper method for prettifying C99 __func__ and GCC __PRETTY_FUNCTION__.
+- (NSString *)prettyNameForFunc:(NSString *)func;
+
+@end  // GTMLogBasicFormatter
+
+
+// A log formatter that formats the log string like the basic formatter, but
+// also prepends a timestamp and some basic process info to the message, as
+// shown in the following sample output.
+//   2007-12-30 10:29:24.177 myapp[4588/0xa07d0f60] [lvl=1] log message here
+@interface GTMLogStandardFormatter : GTMLogBasicFormatter {
+ @private
+  NSDateFormatter *dateFormatter_;  // yyyy-MM-dd HH:mm:ss.SSS
+  NSString *pname_;
+  pid_t pid_;
+}
+@end  // GTMLogStandardFormatter
+
+
+//
+//   Log Filters
+//
+
+// Protocol to be implemented by a GTMLogFilter instance.
+@protocol GTMLogFilter <NSObject>
+// Returns YES if |msg| at |level| should be logged; NO otherwise.
+- (BOOL)filterAllowsMessage:(NSString *)msg level:(GTMLoggerLevel)level;
+@end  // GTMLogFilter
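+
+// Example (illustrative): the core of a custom filter that only lets through
+// messages containing a given marker string.
+//
+//   - (BOOL)filterAllowsMessage:(NSString *)msg level:(GTMLoggerLevel)level {
+//     return [msg rangeOfString:@"[NET]"].location != NSNotFound;
+//   }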
+
+
+// A log filter that filters messages at the kGTMLoggerLevelDebug level out of
+// non-debug builds. Messages at the kGTMLoggerLevelInfo level are also filtered
+// out of non-debug builds unless GTMVerboseLogging is set in the environment or
+// the process's defaults. Messages at the kGTMLoggerLevelError level are
+// never filtered.
+@interface GTMLogLevelFilter : NSObject <GTMLogFilter> {
+ @private
+  BOOL verboseLoggingEnabled_;
+  NSUserDefaults *userDefaults_;
+}
+@end  // GTMLogLevelFilter
+
+// A simple log filter that does NOT filter anything out;
+// -filterAllowsMessage:level: will always return YES. This can be a convenient
+// way to enable debug-level logging in release builds (if you so desire).
+@interface GTMLogNoFilter : NSObject <GTMLogFilter>
+@end  // GTMLogNoFilter
+
+
+// Base class for custom level filters. Not for direct use, use the minimum
+// or maximum level subclasses below.
+@interface GTMLogAllowedLevelFilter : NSObject <GTMLogFilter> {
+ @private
+  NSIndexSet *allowedLevels_;
+}
+@end
+
+// A log filter that allows you to set a minimum log level. Messages below this
+// level will be filtered.
+@interface GTMLogMininumLevelFilter : GTMLogAllowedLevelFilter
+
+// Designated initializer; logs at levels < |level| will be filtered.
+- (id)initWithMinimumLevel:(GTMLoggerLevel)level;
+
+@end
+
+// A log filter that allows you to set a maximum log level. Messages whose level
+// exceeds this level will be filtered. This is really only useful if you have
+// a composite GTMLogger that is sending the other messages elsewhere.
+@interface GTMLogMaximumLevelFilter : GTMLogAllowedLevelFilter
+
+// Designated initializer; logs at levels > |level| will be filtered.
+- (id)initWithMaximumLevel:(GTMLoggerLevel)level;
+
+@end
+
+
+// For subclasses only
+@interface GTMLogger (PrivateMethods)
+
+- (void)logInternalFunc:(const char *)func
+                 format:(NSString *)fmt
+                 valist:(va_list)args
+                  level:(GTMLoggerLevel)level NS_FORMAT_FUNCTION(2, 0);
+
+@end
+

+ 69 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMMethodCheck.h

@@ -0,0 +1,69 @@
+//
+//  GTMMethodCheck.h
+//
+//  Copyright 2006-2016 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+#import <stdio.h>
+#import <sysexits.h>
+
+/// A macro for enforcing debug-time checks to make sure all required methods are linked in.
+//
+// When using categories, it can be very easy to forget to include the
+// implementation of a category.
+// Let's say you had a class foo that depended on method bar of class baz, and
+// method bar was implemented as a member of a category.
+// You could add the following code:
+//
+// GTM_METHOD_CHECK(baz, bar)
+//
+// and the code would check, just before main is called, that baz implements
+// bar. This works for both dynamic libraries and executables.
+//
+//
+// This is not compiled into release builds.
+
+#ifdef DEBUG
+
+// This is the "magic".
+// A) we need a multi layer define here so that the preprocessor expands
+//    __LINE__ the way we want it. We need __LINE__ so that each of our
+//    GTM_METHOD_CHECKs generates a unique function name.
+#define GTM_METHOD_CHECK(class, method) GTM_METHOD_CHECK_INNER(class, method, __LINE__)
+#define GTM_METHOD_CHECK_INNER(class, method, line) \
+    GTM_METHOD_CHECK_INNER_INNER(class, method, line)
+
+// B) define a function that is called at startup to check that |class| has an
+//    implementation for |method| (either a class method or an instance method).
+#define GTM_METHOD_CHECK_INNER_INNER(class, method, line) \
+__attribute__ ((constructor, visibility("hidden"))) \
+    static void xxGTMMethodCheckMethod ## class ## line () { \
+  @autoreleasepool { \
+    if (![class instancesRespondToSelector:@selector(method)] \
+        && ![class respondsToSelector:@selector(method)]) { \
+      fprintf(stderr, "%s:%d: error: We need method '%s' to be linked in for class '%s'\n", \
+              __FILE__, line, #method, #class); \
+      exit(EX_SOFTWARE); \
+    } \
+  } \
+}
+
+#else  // DEBUG
+
+// Do nothing in release.
+#define GTM_METHOD_CHECK(class, method)
+
+#endif  // DEBUG
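+
+// Example (illustrative; the class Foo is hypothetical): place the check at
+// file scope in the implementation file that depends on the category method.
+//
+//   @implementation Foo
+//   GTM_METHOD_CHECK(NSString, gtm_stringByEscapingForURLArgument);
+//   // ...
+//   @end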

+ 199 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSData+zlib.h

@@ -0,0 +1,199 @@
+//
+//  GTMNSData+zlib.h
+//
+//  Copyright 2007-2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+/// Helpers for dealing w/ zlib inflate/deflate calls.
+@interface NSData (GTMZLibAdditions)
+
+// NOTE: For 64bit, none of these APIs handle input sizes >32bits; they will
+// return nil when given such data.  To handle data of that size you really
+// should be streaming it rather than doing it all in memory.
+
+#pragma mark Gzip Compression
+
+/// Return an autoreleased NSData w/ the result of gzipping the bytes.
+//
+//  Uses the default compression level.
++ (NSData *)gtm_dataByGzippingBytes:(const void *)bytes
+                             length:(NSUInteger)length __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByGzippingBytes:(const void *)bytes
+                             length:(NSUInteger)length
+                              error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of gzipping the payload of |data|.
+//
+//  Uses the default compression level.
++ (NSData *)gtm_dataByGzippingData:(NSData *)data __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByGzippingData:(NSData *)data
+                             error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of gzipping the bytes using |level| compression level.
+//
+// |level| can be 1-9, any other values will be clipped to that range.
++ (NSData *)gtm_dataByGzippingBytes:(const void *)bytes
+                             length:(NSUInteger)length
+                   compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByGzippingBytes:(const void *)bytes
+                             length:(NSUInteger)length
+                   compressionLevel:(int)level
+                              error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of gzipping the payload of |data| using |level| compression level.
++ (NSData *)gtm_dataByGzippingData:(NSData *)data
+                  compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByGzippingData:(NSData *)data
+                  compressionLevel:(int)level
+                             error:(NSError **)error;
+
+#pragma mark Zlib "Stream" Compression
+
+// NOTE: deflate is *NOT* gzip.  deflate is a "zlib" stream.  Pick which one
+// you really want to create.  (The inflate APIs will handle either.)
+
+/// Return an autoreleased NSData w/ the result of deflating the bytes.
+//
+//  Uses the default compression level.
++ (NSData *)gtm_dataByDeflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByDeflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length
+                               error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of deflating the payload of |data|.
+//
+//  Uses the default compression level.
++ (NSData *)gtm_dataByDeflatingData:(NSData *)data __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByDeflatingData:(NSData *)data
+                              error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of deflating the bytes using |level| compression level.
+//
+// |level| can be 1-9, any other values will be clipped to that range.
++ (NSData *)gtm_dataByDeflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length
+                    compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByDeflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length
+                    compressionLevel:(int)level
+                               error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of deflating the payload of |data| using |level| compression level.
++ (NSData *)gtm_dataByDeflatingData:(NSData *)data
+                   compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByDeflatingData:(NSData *)data
+                   compressionLevel:(int)level
+                              error:(NSError **)error;
+
+#pragma mark Uncompress of Gzip or Zlib
+
+/// Return an autoreleased NSData w/ the result of decompressing the bytes.
+//
+// The bytes to decompress can be zlib or gzip payloads.
++ (NSData *)gtm_dataByInflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByInflatingBytes:(const void *)bytes
+                              length:(NSUInteger)length
+                               error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of decompressing the payload of |data|.
+//
+// The data to decompress can be zlib or gzip payloads.
++ (NSData *)gtm_dataByInflatingData:(NSData *)data __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByInflatingData:(NSData *)data
+                              error:(NSError **)error;
+
+#pragma mark "Raw" Compression Support
+
+// NOTE: raw deflate is *NOT* gzip or deflate.  It does not include a header
+// of any form and should only be used within streams where an external
+// crc/etc. is done to validate the data.  The RawInflate APIs can be used on
+// data processed like this.
+
+/// Return an autoreleased NSData w/ the result of *raw* deflating the bytes.
+//
+//  Uses the default compression level.
+//  *No* header is added to the resulting data.
++ (NSData *)gtm_dataByRawDeflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawDeflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length
+                                  error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of *raw* deflating the payload of |data|.
+//
+//  Uses the default compression level.
+//  *No* header is added to the resulting data.
++ (NSData *)gtm_dataByRawDeflatingData:(NSData *)data __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawDeflatingData:(NSData *)data
+                                 error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of *raw* deflating the bytes using |level| compression level.
+//
+// |level| can be 1-9, any other values will be clipped to that range.
+//  *No* header is added to the resulting data.
++ (NSData *)gtm_dataByRawDeflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length
+                       compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawDeflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length
+                       compressionLevel:(int)level
+                                  error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of *raw* deflating the payload of |data| using |level| compression level.
+//  *No* header is added to the resulting data.
++ (NSData *)gtm_dataByRawDeflatingData:(NSData *)data
+                      compressionLevel:(int)level __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawDeflatingData:(NSData *)data
+                      compressionLevel:(int)level
+                                 error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of *raw* decompressing the bytes.
+//
+// The data to decompress should *not* have any header (neither zlib nor gzip).
++ (NSData *)gtm_dataByRawInflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawInflatingBytes:(const void *)bytes
+                                 length:(NSUInteger)length
+                                  error:(NSError **)error;
+
+/// Return an autoreleased NSData w/ the result of *raw* decompressing the payload of |data|.
+//
+// The data to decompress should *not* have any header (neither zlib nor gzip).
++ (NSData *)gtm_dataByRawInflatingData:(NSData *)data __attribute__((deprecated("Use error variant")));
++ (NSData *)gtm_dataByRawInflatingData:(NSData *)data
+                                 error:(NSError **)error;
+
+@end
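+
+// Example (illustrative; |payload| is an existing NSData): round-tripping data
+// through gzip using the error-returning variants.
+//
+//   NSError *error = nil;
+//   NSData *compressed = [NSData gtm_dataByGzippingData:payload error:&error];
+//   NSData *restored =
+//       [NSData gtm_dataByInflatingData:compressed error:&error];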
+
+FOUNDATION_EXPORT NSString *const GTMNSDataZlibErrorDomain;
+FOUNDATION_EXPORT NSString *const GTMNSDataZlibErrorKey;  // NSNumber
+FOUNDATION_EXPORT NSString *const GTMNSDataZlibRemainingBytesKey;  // NSNumber
+
+typedef NS_ENUM(NSInteger, GTMNSDataZlibError) {
+  GTMNSDataZlibErrorGreaterThan32BitsToCompress = 1024,
+  // An internal zlib error.
+  // GTMNSDataZlibErrorKey will contain the error value.
+  // NSLocalizedDescriptionKey may contain an error string from zlib.
+  // Look in zlib.h for list of errors.
+  GTMNSDataZlibErrorInternal,
+  // There was left over data in the buffer that was not used.
+  // GTMNSDataZlibRemainingBytesKey will contain number of remaining bytes.
+  GTMNSDataZlibErrorDataRemaining
+};

+ 40 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSDictionary+URLArguments.h

@@ -0,0 +1,40 @@
+//
+//  GTMNSDictionary+URLArguments.h
+//
+//  Copyright 2006-2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+
+/// Utility for building a URL or POST argument string.
+@interface NSDictionary (GTMNSDictionaryURLArgumentsAdditions)
+
+/// Returns a dictionary of the decoded key-value pairs in an HTTP arguments
+/// string of the form key1=value1&key2=value2&...&keyN=valueN.
+/// Keys and values will be unescaped automatically.
+/// Only the first value for a repeated key is returned.
+///
+/// NOTE: Apps targeting iOS 8 or OS X 10.10 and later should use
+///       NSURLComponents and NSURLQueryItem to create URLs with
+///       query arguments instead of using these category methods.
++ (NSDictionary *)gtm_dictionaryWithHttpArgumentsString:(NSString *)argString NS_DEPRECATED(10_0, 10_10, 2_0, 8_0, "Use NSURLComponents and NSURLQueryItem.");
+
+/// Gets a string representation of the dictionary in the form
+/// key1=value1&key2=value2&...&keyN=valueN, suitable for use as either
+/// URL arguments (after a '?') or POST body. Keys and values will be escaped
+/// automatically, so they should be unescaped in the dictionary.
+- (NSString *)gtm_httpArgumentsString NS_DEPRECATED(10_0, 10_10, 2_0, 8_0, "Use NSURLComponents and NSURLQueryItem.");
+
+@end
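+
+// Example (illustrative):
+//
+//   NSDictionary *args =
+//       [NSDictionary gtm_dictionaryWithHttpArgumentsString:@"q=foo&lang=en"];
+//   // args contains @{ @"q" : @"foo", @"lang" : @"en" }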

+ 45 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMNSString+URLArguments.h

@@ -0,0 +1,45 @@
+//
+//  GTMNSString+URLArguments.h
+//
+//  Copyright 2006-2008 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+
+/// Utilities for encoding and decoding URL arguments.
+@interface NSString (GTMNSStringURLArgumentsAdditions)
+
+/// Returns a string that is escaped properly to be a URL argument.
+///
+/// This differs from stringByAddingPercentEscapesUsingEncoding: in that it
+/// will escape all the reserved characters (per RFC 3986
+/// <http://www.ietf.org/rfc/rfc3986.txt>) which
+/// stringByAddingPercentEscapesUsingEncoding would leave.
+///
+/// This will also escape '%', so this should not be used on a string that has
+/// already been escaped unless double-escaping is the desired result.
+///
+/// NOTE: Apps targeting iOS 8 or OS X 10.10 and later should use
+///       NSURLComponents and NSURLQueryItem to create properly-escaped
+///       URLs instead of using these category methods.
+- (NSString*)gtm_stringByEscapingForURLArgument NS_DEPRECATED(10_0, 10_10, 2_0, 8_0, "Use NSURLComponents.");
+
+/// Returns the unescaped version of a URL argument
+///
+/// This has the same behavior as stringByReplacingPercentEscapesUsingEncoding:,
+/// except that it will also convert '+' to space.
+- (NSString*)gtm_stringByUnescapingFromURLArgument NS_DEPRECATED(10_0, 10_10, 2_0, 8_0, "Use NSURLComponents.");
+
+@end
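+
+// Example (illustrative): '&' and '=' are reserved characters, so both get
+// escaped.
+//
+//   NSString *escaped = [@"a&b=c" gtm_stringByEscapingForURLArgument];
+//   // escaped is @"a%26b%3Dc"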

+ 112 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMStringEncoding.h

@@ -0,0 +1,112 @@
+//
+//  GTMStringEncoding.h
+//
+//  Copyright 2010 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+// A generic class for arbitrary base-2 to 128 string encoding and decoding.
+@interface GTMStringEncoding : NSObject {
+ @private
+  NSData *charMapData_;
+  char *charMap_;
+  int reverseCharMap_[128];
+  int shift_;
+  int mask_;
+  BOOL doPad_;
+  char paddingChar_;
+  int padLen_;
+}
+
+// Create a new, autoreleased GTMStringEncoding object with a standard encoding.
++ (id)binaryStringEncoding;
++ (id)hexStringEncoding;
++ (id)rfc4648Base32StringEncoding;
++ (id)rfc4648Base32HexStringEncoding;
++ (id)crockfordBase32StringEncoding;
++ (id)rfc4648Base64StringEncoding;
++ (id)rfc4648Base64WebsafeStringEncoding;
+
+// Create a new, autoreleased GTMStringEncoding object with the given string,
+// as described below.
++ (id)stringEncodingWithString:(NSString *)string;
+
+// Initialize a new GTMStringEncoding object with the string.
+//
+// The length of the string must be a power of 2, at least 2 and at most 128.
+// Only 7-bit ASCII characters are permitted in the string.
+//
+// These characters are the canonical set emitted during encoding.
+// If the characters have alternatives (e.g. case, easily transposed) then use
+// addDecodeSynonyms: to configure them.
+- (id)initWithString:(NSString *)string;
+
+// Add decoding synonyms as specified in the synonyms argument.
+//
+// It should be a sequence of one previously reverse mapped character,
+// followed by one or more non-reverse mapped character synonyms.
+// Only 7-bit ASCII characters are permitted in the string.
+//
+// e.g. If a GTMStringEncoding object has already been initialised with a set
+// of characters excluding I, L and O (to avoid confusion with digits) and you
+// want to accept them as digits, you can call addDecodeSynonyms:@"0oO1iIlL".
+- (void)addDecodeSynonyms:(NSString *)synonyms;
+
+// A sequence of characters to ignore if they occur during decoding.
+// Only 7-bit ASCII characters are permitted in the string.
+- (void)ignoreCharacters:(NSString *)chars;
+
+// Indicates whether padding is performed during encoding.
+- (BOOL)doPad;
+- (void)setDoPad:(BOOL)doPad;
+
+// Sets the padding character to use during encoding.
+- (void)setPaddingChar:(char)c;
+
+// Encode a raw binary buffer to a 7-bit ASCII string.
+- (NSString *)encode:(NSData *)data __attribute__((deprecated("Use encode:error:")));
+- (NSString *)encodeString:(NSString *)string __attribute__((deprecated("Use encodeString:error:")));
+
+- (NSString *)encode:(NSData *)data error:(NSError **)error;
+- (NSString *)encodeString:(NSString *)string error:(NSError **)error;
+
+// Decode a 7-bit ASCII string to a raw binary buffer.
+- (NSData *)decode:(NSString *)string __attribute__((deprecated("Use decode:error:")));
+- (NSString *)stringByDecoding:(NSString *)string __attribute__((deprecated("Use stringByDecoding:error:")));
+
+- (NSData *)decode:(NSString *)string error:(NSError **)error;
+- (NSString *)stringByDecoding:(NSString *)string error:(NSError **)error;
+
+@end
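+
+// Example (illustrative): a base64 round trip using the error variants.
+//
+//   GTMStringEncoding *base64 =
+//       [GTMStringEncoding rfc4648Base64StringEncoding];
+//   NSError *error = nil;
+//   NSString *encoded = [base64 encodeString:@"hello" error:&error];
+//   NSString *decoded = [base64 stringByDecoding:encoded error:&error];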
+
+FOUNDATION_EXPORT NSString *const GTMStringEncodingErrorDomain;
+FOUNDATION_EXPORT NSString *const GTMStringEncodingBadCharacterIndexKey;  // NSNumber
+
+typedef NS_ENUM(NSInteger, GTMStringEncodingError) {
+  // Unable to convert a buffer to NSASCIIStringEncoding.
+  GTMStringEncodingErrorUnableToConverToAscii = 1024,
+  // Unable to convert a buffer to NSUTF8StringEncoding.
+  GTMStringEncodingErrorUnableToConverToUTF8,
+  // Encountered a bad character.
+  // GTMStringEncodingBadCharacterIndexKey will have the index of the character.
+  GTMStringEncodingErrorUnknownCharacter,
+  // The data had a padding character in the middle of the data. Padding characters
+  // can only be at the end.
+  GTMStringEncodingErrorExpectedPadding,
+  // There is unexpected data at the end of the data that could not be decoded.
+  GTMStringEncodingErrorIncompleteTrailingData,
+};

+ 71 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMTypeCasting.h

@@ -0,0 +1,71 @@
+//
+//  GTMTypeCasting.h
+//
+//  Copyright 2010 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations under
+//  the License.
+//
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+// These are some basic macros for making down-casting safer in Objective C.
+// They are loosely based on the same cast types with similar names in C++.
+// A typical usage would look like this:
+//
+// Bar* b = [[Bar alloc] init];
+// Foo* a = GTM_STATIC_CAST(Foo, b);
+//
+// Note that it's GTM_STATIC_CAST(Foo, b) and not GTM_STATIC_CAST(Foo*, b).
+//
+// GTM_STATIC_CAST performs its check only in debug mode, and will assert if
+// and only if:
+//   - object is non-nil
+//   - [object isKindOfClass:[cls class]] returns NO
+//
+// otherwise it returns object.
+//
+// GTM_DYNAMIC_CAST runs in both debug and release and will return nil if
+//   - object is nil
+//   - [object isKindOfClass:[cls class]] returns NO
+//
+// otherwise it returns object.
+//
+
+// Support functions for dealing with casting.
+GTM_INLINE id GTMDynamicCastSupport(Class cls, id object) {
+  _GTMDevAssert(cls, @"Nil Class");
+  return [object isKindOfClass:cls] ? object : nil;
+}
+
+GTM_INLINE id GTMStaticCastSupport(Class cls, id object) {
+  id value = nil;
+  if (object) {
+    value = GTMDynamicCastSupport(cls, object);
+    _GTMDevAssert(value, @"Could not cast %@ to class %@", object, cls);
+  }
+  return value;
+}
+
+#ifndef GTM_STATIC_CAST
+  #ifdef DEBUG
+    #define GTM_STATIC_CAST(type, object) \
+      ((type *) GTMStaticCastSupport([type class], object))
+  #else
+    #define GTM_STATIC_CAST(type, object) ((type *) (object))
+  #endif
+#endif
+
+#ifndef GTM_DYNAMIC_CAST
+  #define GTM_DYNAMIC_CAST(type, object) \
+    ((type *) GTMDynamicCastSupport([type class], object))
+#endif
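+
+// Example (illustrative; |notification| is hypothetical):
+//
+//   id object = [notification object];
+//   // name is nil unless object is (a kind of) NSString.
+//   NSString *name = GTM_DYNAMIC_CAST(NSString, object);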

+ 73 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Headers/GTMURLBuilder.h

@@ -0,0 +1,73 @@
+//
+//  GTMURLBuilder.h
+//
+//  Copyright 2012 Google Inc.
+//
+//  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+//  use this file except in compliance with the License.  You may obtain a copy
+//  of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//  Unless required by applicable law or agreed to in writing, software
+//  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+//  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+//  License for the specific language governing permissions and limitations
+//  under the License.
+//
+
+//
+// Class for creating URLs. It handles URL encoding of parameters.
+//
+// Usage example:
+//
+// GTMURLBuilder *URLBuilder =
+//     [GTMURLBuilder builderWithString:@"http://www.google.com"];
+// [URLBuilder setValue:@"abc" forParameter:@"q"];
+// NSURL *URL = [URLBuilder URL];
+//
+// NOTE: Apps targeting iOS 8 or OS X 10.10 and later should use
+//       NSURLComponents and NSURLQueryItem to create URLs with
+//       query arguments instead of using this class.
+
+
+#import <Foundation/Foundation.h>
+#import "GTMDefines.h"
+
+#if (!TARGET_OS_IPHONE && defined(MAC_OS_X_VERSION_10_10) && MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_10) \
+|| (TARGET_OS_IPHONE && defined(__IPHONE_8_0) && __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_8_0)
+__deprecated_msg("GTMURLBuilder is obsolete; update your app to use NSURLComponents queryItems property instead.")
+#endif
+@interface GTMURLBuilder : NSObject {
+ @private
+  NSMutableDictionary *params_;
+}
+
+@property(nonatomic, readonly) NSString *baseURLString;
+
+// |URLString| is expected to be a valid URL with already escaped parameter
+// values.
++ (GTMURLBuilder *)builderWithString:(NSString *)URLString;
++ (GTMURLBuilder *)builderWithURL:(NSURL *)URL;
+
+// |URLString| The base URL to which parameters will be appended.
+// If the URL already contains parameters, they should already be encoded.
+- (id)initWithString:(NSString *)URLString;
+- (void)setValue:(NSString *)value forParameter:(NSString *)parameter;
+- (void)setIntegerValue:(NSInteger)value forParameter:(NSString *)parameter;
+- (NSString *)valueForParameter:(NSString *)parameter;
+// Returns 0 if there is no value for |parameter| or if the value cannot
+// be parsed into an NSInteger. Use valueForParameter if you want to make
+// sure that the value is set before attempting the parsing.
+- (NSInteger)integerValueForParameter:(NSString *)parameter;
+- (void)removeParameter:(NSString *)parameter;
+- (void)setParameters:(NSDictionary *)parameters;
+- (NSDictionary *)parameters;
+- (NSURL *)URL;
+- (NSString *)URLString;
+
+// Case-sensitive comparison of the URL; the protocol and host are also
+// compared as case-sensitive strings. The order of URL parameters is ignored.
+- (BOOL)isEqual:(GTMURLBuilder *)URLBuilder;
+
+@end

+ 6 - 0
Libraries external/Firebase/MLVision/GoogleToolboxForMac.framework/Modules/module.modulemap

@@ -0,0 +1,6 @@
+framework module GoogleToolboxForMac {
+  umbrella header "GoogleToolboxForMac.h"
+  export *
+  module * { export * }
+  link "z"
+}

+ 183 - 0
Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Any.pbobjc.h

@@ -0,0 +1,183 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/any.proto
+
+// This CPP symbol can be defined to use imports that match up to the framework
+// imports needed when using CocoaPods.
+#if !defined(GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS)
+ #define GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS 0
+#endif
+
+#if GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS
+ #import <Protobuf/GPBDescriptor.h>
+ #import <Protobuf/GPBMessage.h>
+ #import <Protobuf/GPBRootObject.h>
+#else
+ #import "GPBDescriptor.h"
+ #import "GPBMessage.h"
+ #import "GPBRootObject.h"
+#endif
+
+#if GOOGLE_PROTOBUF_OBJC_VERSION < 30002
+#error This file was generated by a newer version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+#if 30002 < GOOGLE_PROTOBUF_OBJC_MIN_SUPPORTED_VERSION
+#error This file was generated by an older version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+
+// @@protoc_insertion_point(imports)
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+
+CF_EXTERN_C_BEGIN
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - GPBAnyRoot
+
+/**
+ * Exposes the extension registry for this file.
+ *
+ * The base class provides:
+ * @code
+ *   + (GPBExtensionRegistry *)extensionRegistry;
+ * @endcode
+ * which is a @c GPBExtensionRegistry that includes all the extensions defined by
+ * this file and all files that it depends on.
+ **/
+@interface GPBAnyRoot : GPBRootObject
+@end
+
+#pragma mark - GPBAny
+
+typedef GPB_ENUM(GPBAny_FieldNumber) {
+  GPBAny_FieldNumber_TypeURL = 1,
+  GPBAny_FieldNumber_Value = 2,
+};
+
+/**
+ * `Any` contains an arbitrary serialized protocol buffer message along with a
+ * URL that describes the type of the serialized message.
+ *
+ * Protobuf library provides support to pack/unpack Any values in the form
+ * of utility functions or additional generated methods of the Any type.
+ *
+ * Example 1: Pack and unpack a message in C++.
+ *
+ *     Foo foo = ...;
+ *     Any any;
+ *     any.PackFrom(foo);
+ *     ...
+ *     if (any.UnpackTo(&foo)) {
+ *       ...
+ *     }
+ *
+ * Example 2: Pack and unpack a message in Java.
+ *
+ *     Foo foo = ...;
+ *     Any any = Any.pack(foo);
+ *     ...
+ *     if (any.is(Foo.class)) {
+ *       foo = any.unpack(Foo.class);
+ *     }
+ *
+ *  Example 3: Pack and unpack a message in Python.
+ *
+ *     foo = Foo(...)
+ *     any = Any()
+ *     any.Pack(foo)
+ *     ...
+ *     if any.Is(Foo.DESCRIPTOR):
+ *       any.Unpack(foo)
+ *       ...
+ *
+ *  Example 4: Pack and unpack a message in Go
+ *
+ *      foo := &pb.Foo{...}
+ *      any, err := ptypes.MarshalAny(foo)
+ *      ...
+ *      foo := &pb.Foo{}
+ *      if err := ptypes.UnmarshalAny(any, foo); err != nil {
+ *        ...
+ *      }
+ *
+ * The pack methods provided by protobuf library will by default use
+ * 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ * methods only use the fully qualified type name after the last '/'
+ * in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ * name "y.z".
+ *
+ *
+ * JSON
+ * ====
+ * The JSON representation of an `Any` value uses the regular
+ * representation of the deserialized, embedded message, with an
+ * additional field `\@type` which contains the type URL. Example:
+ *
+ *     package google.profile;
+ *     message Person {
+ *       string first_name = 1;
+ *       string last_name = 2;
+ *     }
+ *
+ *     {
+ *       "\@type": "type.googleapis.com/google.profile.Person",
+ *       "firstName": <string>,
+ *       "lastName": <string>
+ *     }
+ *
+ * If the embedded message type is well-known and has a custom JSON
+ * representation, that representation will be embedded adding a field
+ * `value` which holds the custom JSON in addition to the `\@type`
+ * field. Example (for message [google.protobuf.Duration][]):
+ *
+ *     {
+ *       "\@type": "type.googleapis.com/google.protobuf.Duration",
+ *       "value": "1.212s"
+ *     }
+ **/
+@interface GPBAny : GPBMessage
+
+/**
+ * A URL/resource name that uniquely identifies the type of the serialized
+ * protocol buffer message. This string must contain at least
+ * one "/" character. The last segment of the URL's path must represent
+ * the fully qualified name of the type (as in
+ * `path/google.protobuf.Duration`). The name should be in a canonical form
+ * (e.g., leading "." is not accepted).
+ *
+ * In practice, teams usually precompile into the binary all types that they
+ * expect it to use in the context of Any. However, for URLs which use the
+ * scheme `http`, `https`, or no scheme, one can optionally set up a type
+ * server that maps type URLs to message definitions as follows:
+ *
+ * * If no scheme is provided, `https` is assumed.
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ *   value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ *   URL, or have them precompiled into a binary to avoid any
+ *   lookup. Therefore, binary compatibility needs to be preserved
+ *   on changes to types. (Use versioned type names to manage
+ *   breaking changes.)
+ *
+ * Note: this functionality is not currently available in the official
+ * protobuf release, and it is not used for type URLs beginning with
+ * type.googleapis.com.
+ *
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation-specific semantics.
+ **/
+@property(nonatomic, readwrite, copy, null_resettable) NSString *typeURL;
+
+/** Must be a valid serialized protocol buffer of the above specified type. */
+@property(nonatomic, readwrite, copy, null_resettable) NSData *value;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+CF_EXTERN_C_END
+
+#pragma clang diagnostic pop
+
+// @@protoc_insertion_point(global_scope)
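
For reference, a minimal Objective-C sketch of the pack/unpack flow documented
for `GPBAny` above, assuming the framework-style imports shown earlier. It uses
only the `typeURL`/`value` properties from this header plus the core
`GPBMessage` serialization API (`-data`, `+parseFromData:error:`); the
`GPBDuration` payload type comes from Duration.pbobjc.h later in this changeset.

    #import <Protobuf/Any.pbobjc.h>
    #import <Protobuf/Duration.pbobjc.h>

    // Pack: store the payload's serialized bytes together with a type URL
    // whose last path segment is the fully qualified message name.
    static GPBAny *PackDuration(GPBDuration *duration) {
      GPBAny *any = [GPBAny message];
      any.typeURL = @"type.googleapis.com/google.protobuf.Duration";
      any.value = [duration data];
      return any;
    }

    // Unpack: per the comment above, only the fully qualified name after
    // the last '/' identifies the type.
    static GPBDuration *UnpackDuration(GPBAny *any, NSError **error) {
      if (![any.typeURL hasSuffix:@"/google.protobuf.Duration"]) {
        return nil;
      }
      return [GPBDuration parseFromData:any.value error:error];
    }

Depending on the Protobuf version bundled here, GPBWellKnownTypes.h may also
ship ready-made pack/unpack helpers for `GPBAny`; the sketch above avoids
relying on them.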

+ 311 - 0
Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Api.pbobjc.h

@@ -0,0 +1,311 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/api.proto
+
+// This CPP symbol can be defined to use imports that match up to the framework
+// imports needed when using CocoaPods.
+#if !defined(GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS)
+ #define GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS 0
+#endif
+
+#if GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS
+ #import <Protobuf/GPBDescriptor.h>
+ #import <Protobuf/GPBMessage.h>
+ #import <Protobuf/GPBRootObject.h>
+#else
+ #import "GPBDescriptor.h"
+ #import "GPBMessage.h"
+ #import "GPBRootObject.h"
+#endif
+
+#if GOOGLE_PROTOBUF_OBJC_VERSION < 30002
+#error This file was generated by a newer version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+#if 30002 < GOOGLE_PROTOBUF_OBJC_MIN_SUPPORTED_VERSION
+#error This file was generated by an older version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+
+// @@protoc_insertion_point(imports)
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+
+CF_EXTERN_C_BEGIN
+
+@class GPBMethod;
+@class GPBMixin;
+@class GPBOption;
+@class GPBSourceContext;
+GPB_ENUM_FWD_DECLARE(GPBSyntax);
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - GPBApiRoot
+
+/**
+ * Exposes the extension registry for this file.
+ *
+ * The base class provides:
+ * @code
+ *   + (GPBExtensionRegistry *)extensionRegistry;
+ * @endcode
+ * which is a @c GPBExtensionRegistry that includes all the extensions defined by
+ * this file and all files that it depends on.
+ **/
+@interface GPBApiRoot : GPBRootObject
+@end
+
+#pragma mark - GPBApi
+
+typedef GPB_ENUM(GPBApi_FieldNumber) {
+  GPBApi_FieldNumber_Name = 1,
+  GPBApi_FieldNumber_MethodsArray = 2,
+  GPBApi_FieldNumber_OptionsArray = 3,
+  GPBApi_FieldNumber_Version = 4,
+  GPBApi_FieldNumber_SourceContext = 5,
+  GPBApi_FieldNumber_MixinsArray = 6,
+  GPBApi_FieldNumber_Syntax = 7,
+};
+
+/**
+ * Api is a light-weight descriptor for an API Interface.
+ *
+ * Interfaces are also described as "protocol buffer services" in some contexts,
+ * such as by the "service" keyword in a .proto file, but they are different
+ * from API Services, which represent a concrete implementation of an interface
+ * as opposed to simply a description of methods and bindings. They are also
+ * sometimes simply referred to as "APIs" in other contexts, such as the name of
+ * this message itself. See https://cloud.google.com/apis/design/glossary for
+ * detailed terminology.
+ **/
+@interface GPBApi : GPBMessage
+
+/**
+ * The fully qualified name of this interface, including package name
+ * followed by the interface's simple name.
+ **/
+@property(nonatomic, readwrite, copy, null_resettable) NSString *name;
+
+/** The methods of this interface, in unspecified order. */
+@property(nonatomic, readwrite, strong, null_resettable) NSMutableArray<GPBMethod*> *methodsArray;
+/** The number of items in @c methodsArray without causing the array to be created. */
+@property(nonatomic, readonly) NSUInteger methodsArray_Count;
+
+/** Any metadata attached to the interface. */
+@property(nonatomic, readwrite, strong, null_resettable) NSMutableArray<GPBOption*> *optionsArray;
+/** The number of items in @c optionsArray without causing the array to be created. */
+@property(nonatomic, readonly) NSUInteger optionsArray_Count;
+
+/**
+ * A version string for this interface. If specified, must have the form
+ * `major-version.minor-version`, as in `1.10`. If the minor version is
+ * omitted, it defaults to zero. If the entire version field is empty, the
+ * major version is derived from the package name, as outlined below. If the
+ * field is not empty, the version in the package name will be verified to be
+ * consistent with what is provided here.
+ *
+ * The versioning schema uses [semantic
+ * versioning](http://semver.org) where the major version number
+ * indicates a breaking change and the minor version an additive,
+ * non-breaking change. Both version numbers are signals to users
+ * what to expect from different versions, and should be carefully
+ * chosen based on the product plan.
+ *
+ * The major version is also reflected in the package name of the
+ * interface, which must end in `v<major-version>`, as in
+ * `google.feature.v1`. For major versions 0 and 1, the suffix can
+ * be omitted. Zero major versions must only be used for
+ * experimental, non-GA interfaces.
+ **/
+@property(nonatomic, readwrite, copy, null_resettable) NSString *version;
+
+/**
+ * Source context for the protocol buffer service represented by this
+ * message.
+ **/
+@property(nonatomic, readwrite, strong, null_resettable) GPBSourceContext *sourceContext;
+/** Test to see if @c sourceContext has been set. */
+@property(nonatomic, readwrite) BOOL hasSourceContext;
+
+/** Included interfaces. See [Mixin][]. */
+@property(nonatomic, readwrite, strong, null_resettable) NSMutableArray<GPBMixin*> *mixinsArray;
+/** The number of items in @c mixinsArray without causing the array to be created. */
+@property(nonatomic, readonly) NSUInteger mixinsArray_Count;
+
+/** The source syntax of the service. */
+@property(nonatomic, readwrite) enum GPBSyntax syntax;
+
+@end
+
+/**
+ * Fetches the raw value of a @c GPBApi's @c syntax property, even
+ * if the value was not defined by the enum at the time the code was generated.
+ **/
+int32_t GPBApi_Syntax_RawValue(GPBApi *message);
+/**
+ * Sets the raw value of a @c GPBApi's @c syntax property, allowing
+ * it to be set to a value that was not defined by the enum at the time the code
+ * was generated.
+ **/
+void SetGPBApi_Syntax_RawValue(GPBApi *message, int32_t value);
+
+#pragma mark - GPBMethod
+
+typedef GPB_ENUM(GPBMethod_FieldNumber) {
+  GPBMethod_FieldNumber_Name = 1,
+  GPBMethod_FieldNumber_RequestTypeURL = 2,
+  GPBMethod_FieldNumber_RequestStreaming = 3,
+  GPBMethod_FieldNumber_ResponseTypeURL = 4,
+  GPBMethod_FieldNumber_ResponseStreaming = 5,
+  GPBMethod_FieldNumber_OptionsArray = 6,
+  GPBMethod_FieldNumber_Syntax = 7,
+};
+
+/**
+ * Method represents a method of an API interface.
+ **/
+@interface GPBMethod : GPBMessage
+
+/** The simple name of this method. */
+@property(nonatomic, readwrite, copy, null_resettable) NSString *name;
+
+/** A URL of the input message type. */
+@property(nonatomic, readwrite, copy, null_resettable) NSString *requestTypeURL;
+
+/** If true, the request is streamed. */
+@property(nonatomic, readwrite) BOOL requestStreaming;
+
+/** The URL of the output message type. */
+@property(nonatomic, readwrite, copy, null_resettable) NSString *responseTypeURL;
+
+/** If true, the response is streamed. */
+@property(nonatomic, readwrite) BOOL responseStreaming;
+
+/** Any metadata attached to the method. */
+@property(nonatomic, readwrite, strong, null_resettable) NSMutableArray<GPBOption*> *optionsArray;
+/** The number of items in @c optionsArray without causing the array to be created. */
+@property(nonatomic, readonly) NSUInteger optionsArray_Count;
+
+/** The source syntax of this method. */
+@property(nonatomic, readwrite) enum GPBSyntax syntax;
+
+@end
+
+/**
+ * Fetches the raw value of a @c GPBMethod's @c syntax property, even
+ * if the value was not defined by the enum at the time the code was generated.
+ **/
+int32_t GPBMethod_Syntax_RawValue(GPBMethod *message);
+/**
+ * Sets the raw value of a @c GPBMethod's @c syntax property, allowing
+ * it to be set to a value that was not defined by the enum at the time the code
+ * was generated.
+ **/
+void SetGPBMethod_Syntax_RawValue(GPBMethod *message, int32_t value);
+
+#pragma mark - GPBMixin
+
+typedef GPB_ENUM(GPBMixin_FieldNumber) {
+  GPBMixin_FieldNumber_Name = 1,
+  GPBMixin_FieldNumber_Root = 2,
+};
+
+/**
+ * Declares an API Interface to be included in this interface. The including
+ * interface must redeclare all the methods from the included interface, but
+ * documentation and options are inherited as follows:
+ *
+ * - If after comment and whitespace stripping, the documentation
+ *   string of the redeclared method is empty, it will be inherited
+ *   from the original method.
+ *
+ * - Each annotation belonging to the service config (http,
+ *   visibility) which is not set in the redeclared method will be
+ *   inherited.
+ *
+ * - If an http annotation is inherited, the path pattern will be
+ *   modified as follows. Any version prefix will be replaced by the
+ *   version of the including interface plus the [root][] path if
+ *   specified.
+ *
+ * Example of a simple mixin:
+ *
+ *     package google.acl.v1;
+ *     service AccessControl {
+ *       // Get the underlying ACL object.
+ *       rpc GetAcl(GetAclRequest) returns (Acl) {
+ *         option (google.api.http).get = "/v1/{resource=**}:getAcl";
+ *       }
+ *     }
+ *
+ *     package google.storage.v2;
+ *     service Storage {
+ *       rpc GetAcl(GetAclRequest) returns (Acl);
+ *
+ *       // Get a data record.
+ *       rpc GetData(GetDataRequest) returns (Data) {
+ *         option (google.api.http).get = "/v2/{resource=**}";
+ *       }
+ *     }
+ *
+ * Example of a mixin configuration:
+ *
+ *     apis:
+ *     - name: google.storage.v2.Storage
+ *       mixins:
+ *       - name: google.acl.v1.AccessControl
+ *
+ * The mixin construct implies that all methods in `AccessControl` are
+ * also declared with the same name and request/response types in
+ * `Storage`. A documentation generator or annotation processor will
+ * see the effective `Storage.GetAcl` method after inheriting
+ * documentation and annotations as follows:
+ *
+ *     service Storage {
+ *       // Get the underlying ACL object.
+ *       rpc GetAcl(GetAclRequest) returns (Acl) {
+ *         option (google.api.http).get = "/v2/{resource=**}:getAcl";
+ *       }
+ *       ...
+ *     }
+ *
+ * Note how the version in the path pattern changed from `v1` to `v2`.
+ *
+ * If the `root` field in the mixin is specified, it should be a
+ * relative path under which inherited HTTP paths are placed. Example:
+ *
+ *     apis:
+ *     - name: google.storage.v2.Storage
+ *       mixins:
+ *       - name: google.acl.v1.AccessControl
+ *         root: acls
+ *
+ * This implies the following inherited HTTP annotation:
+ *
+ *     service Storage {
+ *       // Get the underlying ACL object.
+ *       rpc GetAcl(GetAclRequest) returns (Acl) {
+ *         option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
+ *       }
+ *       ...
+ *     }
+ **/
+@interface GPBMixin : GPBMessage
+
+/** The fully qualified name of the interface which is included. */
+@property(nonatomic, readwrite, copy, null_resettable) NSString *name;
+
+/**
+ * If non-empty, specifies a path under which inherited HTTP paths
+ * are rooted.
+ **/
+@property(nonatomic, readwrite, copy, null_resettable) NSString *root;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+CF_EXTERN_C_END
+
+#pragma clang diagnostic pop
+
+// @@protoc_insertion_point(global_scope)
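
As a hedged illustration, a sketch of populating a `GPBApi` with one
`GPBMethod` via the properties above. The `google.acl.v1` names and type URLs
are placeholders borrowed from the mixin example, and the `GPBSyntax` enum is
assumed to be declared in Type.pbobjc.h, matching the forward declaration above.

    #import <Protobuf/Api.pbobjc.h>
    #import <Protobuf/Type.pbobjc.h>  // assumed home of the GPBSyntax enum

    GPBMethod *getAcl = [GPBMethod message];
    getAcl.name = @"GetAcl";
    getAcl.requestTypeURL = @"type.googleapis.com/google.acl.v1.GetAclRequest";
    getAcl.responseTypeURL = @"type.googleapis.com/google.acl.v1.Acl";

    GPBApi *api = [GPBApi message];
    api.name = @"google.acl.v1.AccessControl";  // package + simple name
    api.version = @"1.0";                       // major-version.minor-version
    [api.methodsArray addObject:getAcl];        // null_resettable: created lazily

    // The raw-value accessors round-trip syntax values that this generated
    // code does not know about (e.g. values from a newer runtime).
    SetGPBApi_Syntax_RawValue(api, GPBApi_Syntax_RawValue(api));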

+ 145 - 0
Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Duration.pbobjc.h

@@ -0,0 +1,145 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/duration.proto
+
+// This CPP symbol can be defined to use imports that match up to the framework
+// imports needed when using CocoaPods.
+#if !defined(GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS)
+ #define GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS 0
+#endif
+
+#if GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS
+ #import <Protobuf/GPBDescriptor.h>
+ #import <Protobuf/GPBMessage.h>
+ #import <Protobuf/GPBRootObject.h>
+#else
+ #import "GPBDescriptor.h"
+ #import "GPBMessage.h"
+ #import "GPBRootObject.h"
+#endif
+
+#if GOOGLE_PROTOBUF_OBJC_VERSION < 30002
+#error This file was generated by a newer version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+#if 30002 < GOOGLE_PROTOBUF_OBJC_MIN_SUPPORTED_VERSION
+#error This file was generated by an older version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+
+// @@protoc_insertion_point(imports)
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+
+CF_EXTERN_C_BEGIN
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - GPBDurationRoot
+
+/**
+ * Exposes the extension registry for this file.
+ *
+ * The base class provides:
+ * @code
+ *   + (GPBExtensionRegistry *)extensionRegistry;
+ * @endcode
+ * which is a @c GPBExtensionRegistry that includes all the extensions defined by
+ * this file and all files that it depends on.
+ **/
+@interface GPBDurationRoot : GPBRootObject
+@end
+
+#pragma mark - GPBDuration
+
+typedef GPB_ENUM(GPBDuration_FieldNumber) {
+  GPBDuration_FieldNumber_Seconds = 1,
+  GPBDuration_FieldNumber_Nanos = 2,
+};
+
+/**
+ * A Duration represents a signed, fixed-length span of time, expressed
+ * as a count of seconds and fractions of seconds at nanosecond
+ * resolution. It is independent of any calendar and concepts like "day"
+ * or "month". It is related to Timestamp in that the difference between
+ * two Timestamp values is a Duration, and it can be added to or
+ * subtracted from a Timestamp. The range is approximately +-10,000 years.
+ *
+ * # Examples
+ *
+ * Example 1: Compute Duration from two Timestamps in pseudo code.
+ *
+ *     Timestamp start = ...;
+ *     Timestamp end = ...;
+ *     Duration duration = ...;
+ *
+ *     duration.seconds = end.seconds - start.seconds;
+ *     duration.nanos = end.nanos - start.nanos;
+ *
+ *     if (duration.seconds < 0 && duration.nanos > 0) {
+ *       duration.seconds += 1;
+ *       duration.nanos -= 1000000000;
+ *     } else if (duration.seconds > 0 && duration.nanos < 0) {
+ *       duration.seconds -= 1;
+ *       duration.nanos += 1000000000;
+ *     }
+ *
+ * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+ *
+ *     Timestamp start = ...;
+ *     Duration duration = ...;
+ *     Timestamp end = ...;
+ *
+ *     end.seconds = start.seconds + duration.seconds;
+ *     end.nanos = start.nanos + duration.nanos;
+ *
+ *     if (end.nanos < 0) {
+ *       end.seconds -= 1;
+ *       end.nanos += 1000000000;
+ *     } else if (end.nanos >= 1000000000) {
+ *       end.seconds += 1;
+ *       end.nanos -= 1000000000;
+ *     }
+ *
+ * Example 3: Compute Duration from datetime.timedelta in Python.
+ *
+ *     td = datetime.timedelta(days=3, minutes=10)
+ *     duration = Duration()
+ *     duration.FromTimedelta(td)
+ *
+ * # JSON Mapping
+ *
+ * In JSON format, the Duration type is encoded as a string rather than an
+ * object, where the string ends in the suffix "s" (indicating seconds) and
+ * is preceded by the number of seconds, with nanoseconds expressed as
+ * fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+ * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+ * be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+ * microsecond should be expressed in JSON format as "3.000001s".
+ **/
+@interface GPBDuration : GPBMessage
+
+/**
+ * Signed seconds of the span of time. Must be from -315,576,000,000
+ * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+ * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+ **/
+@property(nonatomic, readwrite) int64_t seconds;
+
+/**
+ * Signed fractions of a second at nanosecond resolution of the span
+ * of time. Durations less than one second are represented with a 0
+ * `seconds` field and a positive or negative `nanos` field. For durations
+ * of one second or more, a non-zero value for the `nanos` field must be
+ * of the same sign as the `seconds` field. Must be from -999,999,999
+ * to +999,999,999 inclusive.
+ **/
+@property(nonatomic, readwrite) int32_t nanos;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+CF_EXTERN_C_END
+
+#pragma clang diagnostic pop
+
+// @@protoc_insertion_point(global_scope)
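
A minimal sketch translating the normalization rules above into Objective-C,
building a `GPBDuration` from an `NSTimeInterval`, assuming only this header
and Foundation:

    #import <Foundation/Foundation.h>
    #import <Protobuf/Duration.pbobjc.h>

    static GPBDuration *DurationFromTimeInterval(NSTimeInterval interval) {
      GPBDuration *d = [GPBDuration message];
      int64_t seconds = (int64_t)interval;  // truncates toward zero
      int32_t nanos = (int32_t)((interval - seconds) * 1e9);
      // Enforce the documented invariant: for spans of one second or more,
      // a non-zero `nanos` must have the same sign as `seconds`. Truncation
      // toward zero already satisfies this; the fix-up mirrors the pseudo
      // code in Example 1 above.
      if (seconds > 0 && nanos < 0) {
        seconds -= 1;
        nanos += 1000000000;
      } else if (seconds < 0 && nanos > 0) {
        seconds += 1;
        nanos -= 1000000000;
      }
      d.seconds = seconds;
      d.nanos = nanos;
      return d;
    }

For example, an interval of 1.5 yields `seconds == 1` and
`nanos == 500000000`, which the JSON mapping above renders as "1.500s".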

+ 74 - 0
Libraries external/Firebase/MLVision/Protobuf.framework/Headers/Empty.pbobjc.h

@@ -0,0 +1,74 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/empty.proto
+
+// This CPP symbol can be defined to use imports that match up to the framework
+// imports needed when using CocoaPods.
+#if !defined(GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS)
+ #define GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS 0
+#endif
+
+#if GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS
+ #import <Protobuf/GPBDescriptor.h>
+ #import <Protobuf/GPBMessage.h>
+ #import <Protobuf/GPBRootObject.h>
+#else
+ #import "GPBDescriptor.h"
+ #import "GPBMessage.h"
+ #import "GPBRootObject.h"
+#endif
+
+#if GOOGLE_PROTOBUF_OBJC_VERSION < 30002
+#error This file was generated by a newer version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+#if 30002 < GOOGLE_PROTOBUF_OBJC_MIN_SUPPORTED_VERSION
+#error This file was generated by an older version of protoc which is incompatible with your Protocol Buffer library sources.
+#endif
+
+// @@protoc_insertion_point(imports)
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+
+CF_EXTERN_C_BEGIN
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - GPBEmptyRoot
+
+/**
+ * Exposes the extension registry for this file.
+ *
+ * The base class provides:
+ * @code
+ *   + (GPBExtensionRegistry *)extensionRegistry;
+ * @endcode
+ * which is a @c GPBExtensionRegistry that includes all the extensions defined by
+ * this file and all files that it depends on.
+ **/
+@interface GPBEmptyRoot : GPBRootObject
+@end
+
+#pragma mark - GPBEmpty
+
+/**
+ * A generic empty message that you can re-use to avoid defining duplicated
+ * empty messages in your APIs. A typical example is to use it as the request
+ * or the response type of an API method. For instance:
+ *
+ *     service Foo {
+ *       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ *     }
+ *
+ * The JSON representation for `Empty` is an empty JSON object `{}`.
+ **/
+@interface GPBEmpty : GPBMessage
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+CF_EXTERN_C_END
+
+#pragma clang diagnostic pop
+
+// @@protoc_insertion_point(global_scope)
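
A quick sanity check of the semantics above, assuming the same framework
imports: `GPBEmpty` declares no fields, so its wire form is zero bytes and it
parses from empty data.

    #import <Protobuf/Empty.pbobjc.h>

    GPBEmpty *empty = [GPBEmpty message];
    NSCAssert([empty data].length == 0, @"Empty serializes to zero bytes");

    NSError *error = nil;
    GPBEmpty *parsed = [GPBEmpty parseFromData:[NSData data] error:&error];
    NSCAssert(parsed != nil && error == nil, @"round-trip succeeds");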

Some files were not shown because too many files changed in this diff