/third_party/mindspore/mindspore/lite/test/st/ |
D | win_runtest.bat |
    61 …if !TYPE_ID!==1 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    62 …if !TYPE_ID!==2 (converter_lite --fmk=MINDIR --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    63 …if !TYPE_ID!==3 (converter_lite --fmk=CAFFE --modelFile="%MODEL_PATH%/!MODEL_NAME!.prototxt" --wei…
    64 …if !TYPE_ID!==4 (converter_lite --fmk=ONNX --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --outp…
    65 …if !TYPE_ID!==5 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    66 …if !TYPE_ID!==6 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    67 …if !TYPE_ID!==7 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    68 …if !TYPE_ID!==8 (converter_lite --fmk=TFLITE --modelFile="%MODEL_PATH%/!MODEL_NAME!.!SUFFIX!" --ou…
    92 …if !TYPE_ID!==1 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!.ms" --inDataFile=…
    93 …if !TYPE_ID!==2 (benchmark --modelFile="%DST_PACKAGE_PATH%\!MODEL_NAME!.!SUFFIX!_train.ms" --inDat…
    [all …]
|
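The truncated win_runtest.bat lines above dispatch converter_lite per TYPE_ID (MINDIR, CAFFE, ONNX, TFLITE) and then run benchmark on the resulting .ms files. A minimal sketch of that dispatch as a POSIX shell case statement; TYPE_ID, MODEL_PATH, MODEL_NAME, SUFFIX and OUT_PATH are assumed placeholders, and the flags hidden by the truncations are not reproduced.

# Hedged sketch: map TYPE_ID to a converter_lite front end, mirroring the
# dispatch in win_runtest.bat. MODEL_PATH, MODEL_NAME, SUFFIX and OUT_PATH
# are assumed placeholders, not values taken from the original script.
case "${TYPE_ID}" in
  1|2) FMK=MINDIR ;;
  3)   FMK=CAFFE ;;
  4)   FMK=ONNX ;;
  *)   FMK=TFLITE ;;
esac
if [ "${FMK}" = "CAFFE" ]; then
  # Caffe ships the topology (.prototxt) and the weights separately.
  ./converter_lite --fmk=CAFFE \
    --modelFile="${MODEL_PATH}/${MODEL_NAME}.prototxt" \
    --weightFile="${MODEL_PATH}/${MODEL_NAME}.caffemodel" \
    --outputFile="${OUT_PATH}/${MODEL_NAME}"
else
  ./converter_lite --fmk="${FMK}" \
    --modelFile="${MODEL_PATH}/${MODEL_NAME}.${SUFFIX}" \
    --outputFile="${OUT_PATH}/${MODEL_NAME}"
fi

The Caffe branch is the only one that needs a separate --weightFile, since the .prototxt carries the network topology only.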
/third_party/mindspore/mindspore/lite/examples/transfer_learning/model/ |
D | prepare_model.sh |
    36 LD_LIBRARY_PATH=./ $CONVERTER --fmk=MINDIR --trainModel=false --modelFile=transfer_learning_tod_ba…
    37 LD_LIBRARY_PATH=./ $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=transfer_learning_tod_head…
|
/third_party/mindspore/mindspore/lite/examples/train_lenet/model/ |
D | prepare_model.sh |
    48 LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_t…
    50 LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=mix_len…
|
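The transfer_learning and train_lenet prepare_model.sh scripts above convert the exported MindIR through converter_lite with --trainModel=true so the resulting .ms file stays trainable on device. A minimal hedged sketch of that invocation; CONVERTER and the file names are placeholders here (lenet_tod.* follows the train_lenet_java entry further below).

# Hedged sketch of the prepare_model.sh conversion step; CONVERTER and the
# file names are assumed placeholders.
CONVERTER=./converter_lite
LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} ${CONVERTER} \
  --fmk=MINDIR \
  --trainModel=true \
  --modelFile=lenet_tod.mindir \
  --outputFile=lenet_tod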
/third_party/mindspore/mindspore/lite/test/st/scripts/ |
D | run_benchmark_x86.sh |
    28 …./converter_lite --fmk=TFLITE --modelFile=$models_path/${model_name} --outputFile=${ms_models_pat…
    39 …./converter_lite --fmk=TFLITE --modelFile=$models_path/${model_name} --outputFile=${ms_models_pat…
    50 …./converter_lite --fmk=TFLITE --modelFile=$models_path/${model_name} --outputFile=${ms_models_pat…
    89 …./benchmark --modelFile=${ms_models_path}/${model_name}_1_1_parallel_split.ms --inDataFile=${model…
    100 …./benchmark --modelFile=${ms_models_path}/${model_name}_1_2_parallel_split.ms --inDataFile=${model…
    111 …./benchmark --modelFile=${ms_models_path}/${model_name}_1_3_parallel_split.ms --inDataFile=${model…
|
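run_benchmark_x86.sh first converts the TFLITE models and then benchmarks the _1_1/_1_2/_1_3_parallel_split.ms variants. A hedged sketch of that benchmark loop; only the file-name pattern comes from the hits above, and the input-data variable is an assumed placeholder.

# Hedged sketch: benchmark each parallel-split variant produced above.
# ms_models_path, model_name and input_data_file are assumed placeholders;
# the real --inDataFile argument is truncated in the listing.
for split in 1_1 1_2 1_3; do
  ./benchmark \
    --modelFile="${ms_models_path}/${model_name}_${split}_parallel_split.ms" \
    --inDataFile="${input_data_file}"
done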
D | base_functions.sh |
    73 …./converter_lite --fmk=${model_fmk} --modelFile=${model_file} --weightFile=${weight_file} --outpu…
    226 …MSLITE_BENCH_INPUT_NAMES=${input_names} ./benchmark --modelFile=${model_file} --inDataFile=${input…
    252 …./benchmark --inDataFile=${input_files} --modelFile=${model_file} --inputShapes=${input_shapes} --…
|
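base_functions.sh is the shared helper the other suites call into; its benchmark hits show the model file, the input data file, the input shapes and the MSLITE_BENCH_INPUT_NAMES environment variable being passed through. A hedged sketch of such a run with placeholder values; the flags behind the truncation points above are not reproduced.

# Hedged sketch of a benchmark run as wrapped by base_functions.sh; the
# paths, input name and shape below are assumed placeholders.
MODEL=./ms_models/model.ms
INPUT=./input/model.bin
MSLITE_BENCH_INPUT_NAMES="input_0" \
  ./benchmark \
    --modelFile="${MODEL}" \
    --inDataFile="${INPUT}" \
    --inputShapes="1,224,224,3"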
D | run_net_train.sh |
    71 …./converter_lite --fmk=MINDIR --modelFile=${models_path}/${model_prefix}.mindir --outputFile=${ms_…
    84 …./converter_lite --fmk=MINDIR --modelFile=${models_path}/${model_prefix}.mindir --outputFile=${ms_…
    238 --modelFile=${model_file} \
    366 --modelFile=${model_file} \
|
D | run_cropper.sh | 48 …=./libmindspore-lite.a --configFile=./cropper/cropper_mapping_npu.cfg --modelFile=${ms_models_path…
|
D | run_benchmark_cropping_size.sh | 54 …=./libmindspore-lite.a --configFile=./cropper/cropper_mapping_cpu.cfg --modelFile=${ms_models_path…
|
/third_party/mindspore/mindspore/lite/examples/train_lenet_java/model/ |
D | prepare_model.sh | 35 $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_tod.mindir --outputFile=lenet_tod $QUAN…
|
/third_party/mindspore/mindspore/lite/examples/unified_api/model/ |
D | prepare_model.sh | 48 LD_LIBRARY_PATH=./:${LD_LIBRARY_PATH} $CONVERTER --fmk=MINDIR --trainModel=true --modelFile=lenet_t…
|
/third_party/mindspore/mindspore/lite/test/st/scripts/nnie/ |
D | run_benchmark_nnie.sh | 46 …./benchmark --modelFile=${basepath}/${model_name}.ms --inDataFile=${input_files} --inputShapes=${i…
|
D | run_converter_nnie.sh | 43 …./converter_lite --fmk=CAFFE --modelFile=${models_path}/${model_location}/model/${model_name}.pro…
|
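The NNIE converter script feeds Caffe models to converter_lite; for --fmk=CAFFE the network definition goes in via --modelFile and the trained weights via --weightFile (the weight flag is visible in base_functions.sh above). A hedged, generic sketch with placeholder paths; the .caffemodel file name and any NNIE-specific configuration are assumptions.

# Hedged, generic Caffe conversion; model_name and paths are placeholders,
# and the .caffemodel file name is an assumption.
model_name=lenet
./converter_lite --fmk=CAFFE \
  --modelFile=./model/${model_name}.prototxt \
  --weightFile=./model/${model_name}.caffemodel \
  --outputFile=./ms_models/${model_name}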
/third_party/mindspore/mindspore/lite/tools/common/ |
D | graph_util.cc |
    660 STATUS ValidateFileStr(const std::string &modelFile, const std::string &fileType) { in ValidateFileStr() argument
    661 …if (modelFile.size() > fileType.size() && modelFile.substr(modelFile.size() - fileType.size()) == … in ValidateFileStr()
    668 std::string GetModelName(const std::string &modelFile) { in GetModelName() argument
    669 std::string modelName = modelFile; in GetModelName()
|
D | graph_util.h |
    104 STATUS ValidateFileStr(const std::string &modelFile, const std::string &fileType);
    108 std::string GetModelName(const std::string &modelFile);
|
/third_party/mindspore/mindspore/lite/tools/converter/import/ |
D | mindspore_importer.cc |
    203 func_graph = LoadMindIR(flag.modelFile, false, key, key_len, flag.dec_mode); in ImportMindIR()
    209 func_graph = LoadMindIR(flag.modelFile); in ImportMindIR()
|
/third_party/mindspore/mindspore/lite/tools/converter/ |
D | converter_flags.h | 70 std::string modelFile;
|
D | converter_flags.cc |
    46 AddFlag(&Flags::modelFile, "modelFile", in Flags()
    310 if (this->modelFile.empty()) { in Init()
|
D | converter.cc | 41 converter_parameters->model_file = flag.modelFile; in InitConverterParameters()
|
/third_party/gstreamer/gstplugins_bad/ext/onnx/ |
D | gstonnxclient.h | 72 bool createSession(std::string modelFile, GstOnnxOptimizationLevel optim,
|
D | gstonnxclient.cpp |
    169 bool GstOnnxClient::createSession (std::string modelFile, in createSession() argument
    212 session = new Ort::Session (getEnv (), modelFile.c_str (), sessionOptions); in createSession()
|
/third_party/mindspore/mindspore/lite/tools/converter/parser/tf/ |
D | tf_model_parser.cc |
    500 auto modelFile = flag.model_file; in Parse() local
    502 auto status = ValidateFileStr(modelFile, ".pb"); in Parse()
    514 status = ReadProtoFromBinaryFile(modelFile, tf_root_graph_.get()); in Parse()
|