```cpp
struct Content
{
    AutoStorage<uint8_t> buffer;
    const Net *net = nullptr;
    std::vector<std::unique_ptr<Session>> sessions;
    std::map<const Tensor *, const Session *> tensorMap;
};

Interpreter *Interpreter::createFromFile(const char *file)
{
    if (nullptr == file)
    {
        MNN_PRINT("NULL file for create interpreter\n");
        return nullptr;
    }
    std::unique_ptr<FileLoader> loader(new FileLoader(file));
    if (!loader->valid())
    {
        MNN_PRINT("Create interpreter failed, open %s error\n", file);
        return nullptr;
    }
    bool result = loader->read(); // read the file into the loader
    if (!result)
    {
        MNN_PRINT("Read file error\n");
        return nullptr;
    }
    if (loader->size() == 0)
    {
        MNN_PRINT("Create interpreter failed, %s is empty\n", file);
        return nullptr;
    }
    auto net = new Content;
    bool success = loader->merge(net->buffer); // merge the file content into AutoStorage<uint8_t> buffer
    if (!success)
    {
        return nullptr;
    }
    loader.reset();
    return createFromBufferInternal(net);
}
```
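MNN also exposes `Interpreter::createFromBuffer()` for models already held in memory; both public factories end up in `createFromBufferInternal()` below. A minimal caller-side sketch, not taken from the MNN source (the path argument and byte vector are assumptions):

```cpp
#include <MNN/Interpreter.hpp>

#include <cstdint>
#include <vector>

// Hypothetical helper: create an Interpreter from a file path when one is given,
// otherwise from a model that is already loaded into memory.
// Both factories return nullptr on failure, as the code above shows.
MNN::Interpreter *createInterpreter(const char *path, const std::vector<uint8_t> &modelBytes)
{
    if (path != nullptr)
    {
        return MNN::Interpreter::createFromFile(path);
    }
    return MNN::Interpreter::createFromBuffer(modelBytes.data(), modelBytes.size());
}
```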
```cpp
Interpreter *Interpreter::createFromBufferInternal(Content *net)
{
    if (nullptr == net)
    {
        MNN_PRINT("Buffer is null for create interpreter\n");
        return nullptr;
    }
    flatbuffers::Verifier verify((const uint8_t *)(net->buffer.get()), net->buffer.size());
    if (false == VerifyNetBuffer(verify)) // verify that the buffer is a valid FlatBuffers-encoded Net
    {
        MNN_PRINT("Invalidate buffer to create interpreter\n");
        delete net;
        return nullptr;
    }
    net->net = GetNet(net->buffer.get()); // obtain the Net root table (zero-copy)
    if (nullptr == net->net->oplists())
    {
        MNN_ERROR("Model has no oplist\n");
        delete net;
        return nullptr;
    }
    int opSize = net->net->oplists()->size();
    for (int i = 0; i < opSize; ++i)
    {
        auto op = net->net->oplists()->GetAs<Op>(i); // check that every op in the network is well formed
        if (nullptr == op || nullptr == op->outputIndexes())
        {
            MNN_ERROR("Invalid Model, the %d op is empty\n", i);
            delete net;
            return nullptr;
        }
    }
    return new Interpreter(net);
}
```

MNN stores its models with FlatBuffers, a serialization library that emphasizes performance and low resource usage. Compared with Protocol Buffers it is better suited to mobile devices: the serialized buffer can be read in place without a separate parsing step, which gives higher performance and a smaller memory footprint.
See also: 深入浅出 FlatBuffers (an in-depth introduction to FlatBuffers).
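The verify-then-access pattern in `createFromBufferInternal()` is standard FlatBuffers usage. The sketch below illustrates the same flow; `VerifyNetBuffer()` and `GetNet()` are functions generated by `flatc` from MNN's `Net` schema, and the generated-header name here is an assumption:

```cpp
#include "flatbuffers/flatbuffers.h"
#include "MNN_generated.h" // assumed name of the header flatc generates from MNN's schema

// Return the root Net table if the raw bytes pass structural verification, nullptr otherwise.
// GetNet() does not copy or parse anything; it reinterprets the buffer in place.
const MNN::Net *loadNet(const uint8_t *data, size_t size)
{
    flatbuffers::Verifier verifier(data, size);
    if (!MNN::VerifyNetBuffer(verifier))
    {
        return nullptr;
    }
    return MNN::GetNet(data);
}
```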

```cpp
Session *Interpreter::createMultiPathSession(const std::vector<ScheduleConfig> &configs)
{
    if (nullptr == mNet->buffer.get())
    {
        MNN_ERROR("The model buffer has been released. Can't create session\n");
        return nullptr;
    }
    // compute the Schedule from the given configs
    auto info = Schedule::schedule(mNet->net, configs);
    auto newSession = std::unique_ptr<Session>(new Session(info));
    if (!newSession->valid())
    {
        MNN_PRINT("Invalid Session!!\n");
        return nullptr;
    }
    auto result = newSession.get();
    if (info.validForResize)
    {
        result->resize(); // resize now so the session is ready for inference
    }
    mNet->sessions.emplace_back(std::move(newSession));
    return result;
}
```

A Session is created from the Schedule and, when the schedule allows it, resized immediately so that it is ready for inference.
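Putting the pieces together, here is a minimal caller-side sketch of the whole flow (the model path, backend, thread count, and input-filling step are assumptions, not part of the MNN code quoted above); `createSession()` wraps `createMultiPathSession()` with a single config:

```cpp
#include <MNN/Interpreter.hpp>
#include <MNN/Tensor.hpp>

#include <memory>

int main()
{
    // createFromFile() -> createFromBufferInternal(), as analyzed above.
    std::shared_ptr<MNN::Interpreter> net(MNN::Interpreter::createFromFile("model.mnn"));
    if (nullptr == net)
    {
        return -1;
    }

    // Schedule + Session creation; resize() runs inside session creation when possible.
    MNN::ScheduleConfig config;
    config.type      = MNN_FORWARD_CPU; // backend to schedule onto
    config.numThread = 4;               // assumed thread count
    auto session = net->createSession(config);

    // Run inference: fill the default input tensor, execute, read the default output tensor.
    auto input = net->getSessionInput(session, nullptr);
    // ... write input data, e.g. via input->host<float>() or Tensor::copyFromHostTensor() ...
    net->runSession(session);
    auto output = net->getSessionOutput(session, nullptr);
    // ... read results from output ...
    return 0;
}
```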