//
// face_detection_short_range.swift
//
// This file was automatically generated and should not be edited.
//

import CoreML


/// Model Prediction Input Type
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *)
public class face_detection_short_rangeInput : MLFeatureProvider {

    /// image as color (kCVPixelFormatType_32BGRA) image buffer, 128 pixels wide by 128 pixels high
    public var image: CVPixelBuffer

    public var featureNames: Set<String> {
        get {
            return ["image"]
        }
    }

    public func featureValue(for featureName: String) -> MLFeatureValue? {
        if (featureName == "image") {
            return MLFeatureValue(pixelBuffer: image)
        }
        return nil
    }

    public init(image: CVPixelBuffer) {
        self.image = image
    }

    public convenience init(imageWith image: CGImage) throws {
        self.init(image: try MLFeatureValue(cgImage: image, pixelsWide: 128, pixelsHigh: 128, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
    }

    public convenience init(imageAt image: URL) throws {
        self.init(image: try MLFeatureValue(imageAt: image, pixelsWide: 128, pixelsHigh: 128, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
    }

    public func setImage(with image: CGImage) throws {
        self.image = try MLFeatureValue(cgImage: image, pixelsWide: 128, pixelsHigh: 128, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
    }

    public func setImage(with image: URL) throws {
        self.image = try MLFeatureValue(imageAt: image, pixelsWide: 128, pixelsHigh: 128, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
    }

}


/// Model Prediction Output Type
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *)
public class face_detection_short_rangeOutput : MLFeatureProvider {

    /// Source provided by CoreML
    private let provider : MLFeatureProvider

    /// 1477 as multidimensional array of floats
    public var _1477: MLMultiArray {
        return self.provider.featureValue(for: "1477")!.multiArrayValue!
    }

    /// 1477 as multidimensional array of floats
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    public var _1477ShapedArray: MLShapedArray<Float> {
        return MLShapedArray<Float>(self._1477)
    }

    /// 1011 as multidimensional array of floats
    public var _1011: MLMultiArray {
        return self.provider.featureValue(for: "1011")!.multiArrayValue!
    }

    /// 1011 as multidimensional array of floats
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    public var _1011ShapedArray: MLShapedArray<Float> {
        return MLShapedArray<Float>(self._1011)
    }

    public var featureNames: Set<String> {
        return self.provider.featureNames
    }

    public func featureValue(for featureName: String) -> MLFeatureValue? {
        return self.provider.featureValue(for: featureName)
    }

    public init(_1477: MLMultiArray, _1011: MLMultiArray) {
        self.provider = try! MLDictionaryFeatureProvider(dictionary: ["1477" : MLFeatureValue(multiArray: _1477), "1011" : MLFeatureValue(multiArray: _1011)])
    }

    public init(features: MLFeatureProvider) {
        self.provider = features
    }
}
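
// MARK: - Usage sketch (illustrative, not part of the generated interface).
// Builds an input from a caller-supplied CGImage and reads both output
// tensors as shaped arrays. `cgImage` is hypothetical; the feature names
// "1477" and "1011" come from the model description above. For MediaPipe's
// short-range face detector these are typically the per-anchor box/keypoint
// regressors and confidence scores, but that mapping is an assumption,
// not something this file states.
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
func exampleReadOutputs(from cgImage: CGImage, model: face_detection_short_range) throws {
    // The convenience initializer scales the CGImage to 128x128 for us.
    let input = try face_detection_short_rangeInput(imageWith: cgImage)
    let output = try model.prediction(input: input)
    let regressors: MLShapedArray<Float> = output._1477ShapedArray
    let scores: MLShapedArray<Float> = output._1011ShapedArray
    print(regressors.shape, scores.shape)
}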


/// Class for model loading and prediction
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, *)
public class face_detection_short_range {
    public let model: MLModel

    /// URL of model assuming it was installed in the same bundle as this class
    class var urlOfModelInThisBundle : URL {
        let bundle = Bundle.module
        return bundle.url(forResource: "face_detection_short_range", withExtension: "mlmodelc")!
    }

    /**
        Construct face_detection_short_range instance with an existing MLModel object.

        Usually the application does not use this initializer unless it makes a subclass of face_detection_short_range.
        Such application may want to use `MLModel(contentsOfURL:configuration:)` and `face_detection_short_range.urlOfModelInThisBundle` to create a MLModel object to pass-in.

        - parameters:
          - model: MLModel object
    */
    init(model: MLModel) {
        self.model = model
    }

    /**
        Construct face_detection_short_range instance by automatically loading the model from the app's bundle.
    */
    @available(*, deprecated, message: "Use init(configuration:) instead and handle errors appropriately.")
    public convenience init() {
        try! self.init(contentsOf: type(of:self).urlOfModelInThisBundle)
    }

    /**
        Construct a model with configuration

        - parameters:
          - configuration: the desired model configuration

        - throws: an NSError object that describes the problem
    */
    public convenience init(configuration: MLModelConfiguration) throws {
        try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
    }

    /**
        Construct face_detection_short_range instance with explicit path to mlmodelc file

        - parameters:
          - modelURL: the file url of the model

        - throws: an NSError object that describes the problem
    */
    public convenience init(contentsOf modelURL: URL) throws {
        try self.init(model: MLModel(contentsOf: modelURL))
    }

    /**
        Construct a model with URL of the .mlmodelc directory and configuration

        - parameters:
          - modelURL: the file url of the model
          - configuration: the desired model configuration

        - throws: an NSError object that describes the problem
    */
    public convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws {
        try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration))
    }

    /**
        Construct face_detection_short_range instance asynchronously with optional configuration.

        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.

        - parameters:
          - configuration: the desired model configuration
          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
    */
    @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)
    public class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<face_detection_short_range, Error>) -> Void) {
        return self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler)
    }

    /**
        Construct face_detection_short_range instance asynchronously with optional configuration.

        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.

        - parameters:
          - configuration: the desired model configuration
    */
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    public class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> face_detection_short_range {
        return try await self.load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration)
    }
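
    // MARK: Usage sketch (illustrative, not generated). Loading asynchronously
    // with an explicit configuration; `.all` is the default compute-unit
    // setting, spelled out here to show where it is configured.
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    static func exampleLoadOnAllComputeUnits() async throws -> face_detection_short_range {
        let configuration = MLModelConfiguration()
        configuration.computeUnits = .all   // CPU, GPU, and Neural Engine as available
        return try await face_detection_short_range.load(configuration: configuration)
    }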

    /**
        Construct face_detection_short_range instance asynchronously with URL of the .mlmodelc directory with optional configuration.

        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.

        - parameters:
          - modelURL: the URL to the model
          - configuration: the desired model configuration
          - handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
    */
    @available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)
    public class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<face_detection_short_range, Error>) -> Void) {
        MLModel.load(contentsOf: modelURL, configuration: configuration) { result in
            switch result {
            case .failure(let error):
                handler(.failure(error))
            case .success(let model):
                handler(.success(face_detection_short_range(model: model)))
            }
        }
    }

    /**
        Construct face_detection_short_range instance asynchronously with URL of the .mlmodelc directory with optional configuration.

        Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.

        - parameters:
          - modelURL: the URL to the model
          - configuration: the desired model configuration
    */
    @available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
    public class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> face_detection_short_range {
        let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
        return face_detection_short_range(model: model)
    }

    /**
        Make a prediction using the structured interface

        - parameters:
          - input: the input to the prediction as face_detection_short_rangeInput

        - throws: an NSError object that describes the problem

        - returns: the result of the prediction as face_detection_short_rangeOutput
    */
    public func prediction(input: face_detection_short_rangeInput) throws -> face_detection_short_rangeOutput {
        return try self.prediction(input: input, options: MLPredictionOptions())
    }

    /**
        Make a prediction using the structured interface

        - parameters:
          - input: the input to the prediction as face_detection_short_rangeInput
          - options: prediction options

        - throws: an NSError object that describes the problem

        - returns: the result of the prediction as face_detection_short_rangeOutput
    */
    public func prediction(input: face_detection_short_rangeInput, options: MLPredictionOptions) throws -> face_detection_short_rangeOutput {
        let outFeatures = try model.prediction(from: input, options: options)
        return face_detection_short_rangeOutput(features: outFeatures)
    }

    /**
        Make a prediction using the convenience interface

        - parameters:
          - image: color (kCVPixelFormatType_32BGRA) image buffer, 128 pixels wide by 128 pixels high

        - throws: an NSError object that describes the problem

        - returns: the result of the prediction as face_detection_short_rangeOutput
    */
    public func prediction(image: CVPixelBuffer) throws -> face_detection_short_rangeOutput {
        let input_ = face_detection_short_rangeInput(image: image)
        return try self.prediction(input: input_)
    }
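
    // MARK: Usage sketch (illustrative, not generated). Calling the structured
    // interface directly with explicit prediction options. `pixelBuffer` is a
    // hypothetical caller-supplied 128x128 buffer already in the model's
    // expected pixel format.
    func examplePredictWithOptions(on pixelBuffer: CVPixelBuffer) throws -> face_detection_short_rangeOutput {
        let options = MLPredictionOptions()
        let input = face_detection_short_rangeInput(image: pixelBuffer)
        return try self.prediction(input: input, options: options)
    }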

    /**
        Make a batch prediction using the structured interface

        - parameters:
          - inputs: the inputs to the prediction as [face_detection_short_rangeInput]
          - options: prediction options

        - throws: an NSError object that describes the problem

        - returns: the result of the prediction as [face_detection_short_rangeOutput]
    */
    public func predictions(inputs: [face_detection_short_rangeInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [face_detection_short_rangeOutput] {
        let batchIn = MLArrayBatchProvider(array: inputs)
        let batchOut = try model.predictions(from: batchIn, options: options)
        var results : [face_detection_short_rangeOutput] = []
        results.reserveCapacity(inputs.count)
        for i in 0..<batchOut.count {
            let outProvider = batchOut.features(at: i)
            let result = face_detection_short_rangeOutput(features: outProvider)
            results.append(result)
        }
        return results
    }
}
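
// MARK: - Usage sketch (illustrative, not part of the generated interface).
// Batch prediction over several frames; Core ML can schedule one batch more
// efficiently than repeated single calls. `buffers` is a hypothetical array
// of 128x128 pixel buffers supplied by the caller in the model's format.
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, *)
func exampleBatchPredict(buffers: [CVPixelBuffer]) async throws -> [face_detection_short_rangeOutput] {
    let model = try await face_detection_short_range.load()
    let inputs = buffers.map { face_detection_short_rangeInput(image: $0) }
    return try model.predictions(inputs: inputs)
}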