Reuben Morais 2019-06-14 15:11:21 -03:00
Parent e51b9d987d
Commit 4b305d2f5e
4 changed files with 4 additions and 5 deletions

View file

@@ -588,7 +588,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):
     rnn_impl = rnn_impl_lstmblockfusedcell
     logits, layers = create_model(batch_x=input_tensor,
-                                  seq_length=seq_length if FLAGS.use_seq_length else None,
+                                  seq_length=seq_length if not FLAGS.export_tflite else None,
                                   dropout=no_dropout,
                                   previous_state=previous_state,
                                   overlap=False,
@@ -630,7 +630,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):
         'input_samples': input_samples,
     }
-    if FLAGS.use_seq_length:
+    if not FLAGS.export_tflite:
         inputs.update({'input_lengths': seq_length})
     outputs = {
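
Taken together, the two hunks above gate both the model's `seq_length` argument and the exported `input_lengths` input on `not FLAGS.export_tflite`, replacing the removed `use_seq_length` flag. A minimal runnable sketch of that pattern, with `FLAGS`, the placeholders and `create_model()` replaced by hypothetical stand-ins rather than the real DeepSpeech.py objects:

```python
# Sketch of the gating introduced above; everything here is a stand-in.
class Flags:
    export_tflite = False  # set True when exporting a graph for TF Lite

FLAGS = Flags()

def create_model(batch_x, seq_length=None):
    # Stub standing in for the real create_model(); just echoes what it was given.
    return {'batch_x': batch_x, 'seq_length': seq_length}

def build_export_inputs(input_tensor, seq_length):
    # seq_length is only wired into the model when not exporting for TF Lite.
    logits = create_model(batch_x=input_tensor,
                          seq_length=seq_length if not FLAGS.export_tflite else None)

    inputs = {'input': input_tensor}
    # Likewise, 'input_lengths' is only part of the exported inputs for the
    # non-TF Lite graph.
    if not FLAGS.export_tflite:
        inputs.update({'input_lengths': seq_length})
    return inputs, logits

print(build_export_inputs('input_node', 'input_lengths'))
```

With `export_tflite = True` the returned inputs dict carries only `'input'`; otherwise it also carries `'input_lengths'`, which is the behaviour previously controlled by `use_seq_length`.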

View file

@@ -343,7 +343,7 @@ Refer to the corresponding [README.md](native_client/README.md) for information
 ### Exporting a model for TFLite
-If you want to experiment with the TF Lite engine, you need to export a model that is compatible with it, then use the `--nouse_seq_length --export_tflite` flags. If you already have a trained model, you can re-export it for TFLite by running `DeepSpeech.py` again and specifying the same `checkpoint_dir` that you used for training, as well as passing `--nouse_seq_length --export_tflite --export_dir /model/export/destination`.
+If you want to experiment with the TF Lite engine, you need to export a model that is compatible with it by using the `--export_tflite` flag. If you already have a trained model, you can re-export it for TFLite by running `DeepSpeech.py` again and specifying the same `checkpoint_dir` that you used for training, as well as passing `--export_tflite --export_dir /model/export/destination`.
 ### Making a mmap-able model for inference
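
To make the updated README instructions concrete, here is a hedged example of the re-export invocation it describes, driven from Python via `subprocess`; the checkpoint and export paths are placeholders, and `--checkpoint_dir` is assumed to be the flag corresponding to the `checkpoint_dir` mentioned in the text:

```python
# Sketch only: paths are placeholders, and --checkpoint_dir is an assumption
# based on the `checkpoint_dir` wording in the README paragraph above.
import subprocess

subprocess.run(
    [
        "python", "-u", "DeepSpeech.py",
        "--checkpoint_dir", "/path/to/training/checkpoints",  # same dir used for training
        "--export_tflite",                                    # --nouse_seq_length is no longer needed
        "--export_dir", "/model/export/destination",
    ],
    check=True,
)
```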

View file

@@ -20,4 +20,4 @@ python -u DeepSpeech.py --noshow_progressbar \
     --export_dir '/tmp/train_tflite' \
     --lm_binary_path 'data/smoke_test/vocab.pruned.lm' \
     --lm_trie_path 'data/smoke_test/vocab.trie' \
-    --export_tflite --nouse_seq_length
+    --export_tflite

View file

@@ -73,7 +73,6 @@ def create_flags():
     f.DEFINE_string('export_dir', '', 'directory in which exported models are stored - if omitted, the model won\'t get exported')
     f.DEFINE_boolean('remove_export', False, 'whether to remove old exported models')
     f.DEFINE_boolean('export_tflite', False, 'export a graph ready for TF Lite engine')
-    f.DEFINE_boolean('use_seq_length', True, 'have sequence_length in the exported graph(will make tfcompile unhappy)')
     f.DEFINE_integer('n_steps', 16, 'how many timesteps to process at once by the export graph, higher values mean more latency')
     f.DEFINE_string('export_language', '', 'language the model was trained on e.g. "en" or "English". Gets embedded into exported model.')
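
For context, the `f.DEFINE_*` calls above follow the absl-style flags pattern; the sketch below assumes `f` stands for `absl.flags` (an assumption, not confirmed by this diff) and shows how export code can key off `FLAGS.export_tflite` alone now that `use_seq_length` is gone:

```python
# Rough sketch of defining and consuming flags like the ones above,
# assuming the absl.flags module; not the actual util/flags.py contents.
from absl import app, flags

f = flags
FLAGS = flags.FLAGS

f.DEFINE_string('export_dir', '', 'directory in which exported models are stored')
f.DEFINE_boolean('export_tflite', False, 'export a graph ready for TF Lite engine')
f.DEFINE_integer('n_steps', 16, 'how many timesteps to process at once by the export graph')

def main(_):
    # With use_seq_length removed, export behaviour is keyed off export_tflite alone.
    if FLAGS.export_tflite:
        print('Exporting TF Lite graph to', FLAGS.export_dir, 'with n_steps =', FLAGS.n_steps)
    else:
        print('Exporting standard graph to', FLAGS.export_dir)

if __name__ == '__main__':
    app.run(main)
```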