How can I use the TensorFlow Lite C API from Swift?

// Returns a model from the provided buffer, or null on failure.
// NOTE: The caller retains ownership of the model_data and should ensure that
// the lifetime of the model_data must be at least as long as the lifetime
// of the TfLiteModel.
TFL_CAPI_EXPORT extern TfLiteModel* TfLiteModelCreate(const void* model_data,
size_t model_size);
I want to call this C function from Swift.

//================// In Swift, I have written these initializers in my Model class:

/// Creates a `Model` from the `.tflite` file at the given path.
///
/// - Parameter filePath: Path to a serialized TensorFlow Lite model file.
/// - Returns: `nil` when the path is empty or `TfLiteModelCreateFromFile`
///   fails to load/parse the model.
init?(filePath: String) {
  guard !filePath.isEmpty, let cModel = TfLiteModelCreateFromFile(filePath) else { return nil }
  self.cModel = cModel
}


/// Creates a `Model` from an in-memory serialized model buffer.
///
/// IMPORTANT: per the C API header, `TfLiteModelCreate` does NOT copy the
/// buffer — the caller retains ownership of `modelData` and must keep it
/// alive at least as long as this `Model` (and any `Interpreter` built from
/// it). Passing a pointer into a locally-scoped `Data`/`NSData` that is then
/// deallocated leaves the model reading freed memory and crashes at run time.
///
/// - Parameters:
///   - modelData: Pointer to the serialized model bytes. Not copied.
///   - modelSize: Size of the buffer in bytes. Must be positive.
/// - Returns: `nil` when the pointer is `nil`, the size is non-positive, or
///   `TfLiteModelCreate` fails.
init?(modelData: UnsafeRawPointer!, modelSize: Int) {
  guard modelData != nil, modelSize > 0,
    let cModel = TfLiteModelCreate(modelData, modelSize)
  else { return nil }
  self.cModel = cModel
}


//== In the Interpreter class I added this initializer ==//

/// Creates an `Interpreter` from an in-memory serialized model buffer.
///
/// IMPORTANT: `TfLiteModelCreate` does not copy `modelData`; the caller must
/// keep the backing buffer alive for the entire lifetime of this interpreter,
/// or inference will read freed memory and crash.
///
/// - Parameters:
///   - modelData: Pointer to the serialized model bytes. Not copied.
///   - modelSize: Size of the buffer in bytes.
///   - options: Optional interpreter configuration (thread count, XNNPack).
///   - delegates: Optional delegates added to the interpreter options.
/// - Throws: `InterpreterError.failedToLoadModel` when the buffer cannot be
///   parsed as a model; `InterpreterError.failedToCreateInterpreter` when the
///   C interpreter or its options cannot be created.
public init(
  modelData: UnsafeRawPointer!,
  modelSize: Int,
  options: Options? = nil,
  delegates: [Delegate]? = nil
) throws {
  guard let model = Model(modelData: modelData, modelSize: modelSize) else {
    throw InterpreterError.failedToLoadModel
  }
  guard let cInterpreterOptions = TfLiteInterpreterOptionsCreate() else {
    throw InterpreterError.failedToCreateInterpreter
  }
  // The C options object is copied into the interpreter at creation time, so
  // it is safe to delete on every exit path.
  defer { TfLiteInterpreterOptionsDelete(cInterpreterOptions) }

  self.options = options
  self.delegates = delegates

  options.map {
    if let threadCount = $0.threadCount, threadCount > 0 {
      TfLiteInterpreterOptionsSetNumThreads(cInterpreterOptions, Int32(threadCount))
    }
  }

  // Route TFLite's C error reporter into Swift so failures are printed
  // instead of silently dropped.
  TfLiteInterpreterOptionsSetErrorReporter(
    cInterpreterOptions,
    { (_, format, args) -> Void in
      // Workaround for optionality differences for x86_64 (non-optional) and arm64 (optional).
      let optionalArgs: CVaListPointer? = args
      guard let cFormat = format,
        let arguments = optionalArgs,
        let message = String(cFormat: cFormat, arguments: arguments)
      else {
        return
      }
      print(String(describing: InterpreterError.tensorFlowLiteError(message)))
    },
    nil
  )

  delegates?.forEach { TfLiteInterpreterOptionsAddDelegate(cInterpreterOptions, $0.cDelegate) }

  // Configure the XNNPack delegate after the other delegates explicitly added by the user.
  options.map {
    if $0.isXNNPackEnabled {
      configureXNNPack(options: $0, cInterpreterOptions: cInterpreterOptions)
    }
  }

  guard let cInterpreter = TfLiteInterpreterCreate(model.cModel, cInterpreterOptions) else {
    throw InterpreterError.failedToCreateInterpreter
  }
  self.cInterpreter = cInterpreter
  // NOTE(review): `model` is local here, mirroring the file-based initializer
  // in TensorFlowLiteSwift; for buffer-backed models the *caller's* data
  // buffer must still outlive this interpreter — confirm against the C API
  // lifetime contract quoted above.
}


    // Load the serialized model. Avoid `try!` — a missing/unreadable file
    // should surface as a thrown error, not a crash.
    let data = try Data(contentsOf: URL(fileURLWithPath: modelPath))
    let nsData = data as NSData

    // THIS is the cause of the crash in the original snippet: `nsData` was a
    // local, so the buffer behind `nsData.bytes` could be deallocated while
    // the interpreter still pointed at it (`TfLiteModelCreate` does not copy
    // the buffer). Retain the data for the interpreter's entire lifetime,
    // e.g. in a stored property such as `var modelData: NSData?` on this type.
    self.modelData = nsData  // keep the buffer alive as long as `interpreter`

    let rawPtr = nsData.bytes
    interpreter = try Interpreter(modelData: rawPtr, modelSize: nsData.length, options: options)

// ====== =//

When I run it, it crashes. How can I fix this?

Could you please refer to the TensorFlow Lite guide and let us know if it helps? Thank you.

Yes, I downloaded the iOS demo app and ran it successfully. Now, however, I want to load `model.tflite` from an in-memory `Data` buffer instead of from a file path, and it crashes. The iOS demo does not include sample code that uses `TfLiteModelCreate(modelData, modelSize)` to create and run a model, so I tried it myself, but it crashes.

Is there any demo that uses `TfLiteModelCreate(const void* model_data, size_t model_size)` to create a model and run it? Thanks!