bert-utils/args.py

import os
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.INFO)

# Paths to the pretrained Chinese BERT model and the data/output directories
file_path = os.path.dirname(__file__)
model_dir = os.path.join(file_path, 'chinese_L-12_H-768_A-12/')
config_name = os.path.join(model_dir, 'bert_config.json')
ckpt_name = os.path.join(model_dir, 'bert_model.ckpt')
output_dir = os.path.join(model_dir, '../tmp/result/')
vocab_file = os.path.join(model_dir, 'vocab.txt')
data_dir = os.path.join(model_dir, '../data/')

# Training hyperparameters
num_train_epochs = 10
batch_size = 128
learning_rate = 0.00005

# Fraction of GPU memory the process is allowed to use
gpu_memory_fraction = 0.8
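# A minimal sketch (illustrative only, not executed here) of how this fraction
# is typically applied when building a TF 1.x session in the code that imports
# args.py; the exact call site in this repo may differ:
#
#   gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
#   sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))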

# By default, take the output of the second-to-last encoder layer as the sentence vector
layer_indexes = [-2]

# Maximum sequence length; for single short texts, consider lowering this value
max_seq_len = 32
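
# Hedged usage sketch (not part of this config file): a consumer module would
# typically import these settings to build the BERT config and tokenizer. The
# module names `modeling` and `tokenization` refer to the upstream
# google-research/bert code; how this repo vendors them is an assumption.
#
#   import args
#   import modeling
#   import tokenization
#
#   bert_config = modeling.BertConfig.from_json_file(args.config_name)
#   tokenizer = tokenization.FullTokenizer(vocab_file=args.vocab_file,
#                                          do_lower_case=True)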