-
大小: 826KB | 文件类型: .zip | 金币: 2 | 下载: 1 次 | 发布日期: 2021-06-17
- 语言: Python
- 标签:
资源简介
TensorFlow神经机器翻译(seq2seq)教程
代码片段和文件信息
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Attention-based sequence-to-sequence model with dynamic RNN support."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from . import model
from . import model_helper

__all__ = ["AttentionModel"]
class AttentionModel(model.Model):
  """Sequence-to-sequence dynamic model with attention.

  This class implements a multi-layer recurrent neural network as encoder
  and an attention-based decoder. This is the same as the model described in
  (Luong et al., EMNLP'2015) paper: https://arxiv.org/pdf/1508.04025v5.pdf.
  This class also allows to use GRU cells in addition to LSTM cells with
  support for dropout.
  """

  def __init__(self,
               hparams,
               mode,
               iterator,
               source_vocab_table,
               target_vocab_table,
               reverse_target_vocab_table=None,
               scope=None,
               extra_args=None):
    """Create an attention-enabled seq2seq model.

    Args:
      hparams: Hyperparameter configuration; `attention_architecture` and
        `attention` together decide whether attention is used.
      mode: TRAIN | EVAL | INFER mode (forwarded to the base model).
      iterator: Dataset iterator feeding the model.
      source_vocab_table: Lookup table mapping source words to ids.
      target_vocab_table: Lookup table mapping target words to ids.
      reverse_target_vocab_table: Optional lookup table mapping target ids
        back to words (used for inference output).
      scope: Optional variable scope name.
      extra_args: Optional extras; may carry a custom
        `attention_mechanism_fn` to override the default factory.
    """
    # Attention is active only when both an attention architecture and an
    # attention type are configured.
    self.has_attention = hparams.attention_architecture and hparams.attention

    # Set attention_mechanism_fn: allow callers to inject a custom factory
    # through extra_args, else use the module-level default.
    if self.has_attention:
      if extra_args and extra_args.attention_mechanism_fn:
        self.attention_mechanism_fn = extra_args.attention_mechanism_fn
      else:
        self.attention_mechanism_fn = create_attention_mechanism

    super(AttentionModel, self).__init__(
        hparams=hparams,
        mode=mode,
        iterator=iterator,
        source_vocab_table=source_vocab_table,
        target_vocab_table=target_vocab_table,
        reverse_target_vocab_table=reverse_target_vocab_table,
        scope=scope,
        extra_args=extra_args)
def _prepare_beam_search_decoder_inputs(
    self, beam_width, memory, source_sequence_length, encoder_state):
  """Tile encoder outputs/state for beam search decoding.

  Each batch entry is replicated `beam_width` times so that every beam
  hypothesis attends over its own copy of the encoder memory.

  Args:
    beam_width: Number of beams per batch entry.
    memory: Encoder outputs to attend over.
    source_sequence_length: Lengths of the source sequences.
    encoder_state: Final encoder state used to initialize the decoder.

  Returns:
    Tuple of (memory, source_sequence_length, encoder_state, batch_size),
    each tiled along the batch dimension by `beam_width`.
  """
  # NOTE: tf.contrib.seq2seq is a TensorFlow 1.x API.
  memory = tf.contrib.seq2seq.tile_batch(
      memory, multiplier=beam_width)
  source_sequence_length = tf.contrib.seq2seq.tile_batch(
      source_sequence_length, multiplier=beam_width)
  encoder_state = tf.contrib.seq2seq.tile_batch(
      encoder_state, multiplier=beam_width)
  # Effective batch grows by the beam width.
  batch_size = self.batch_size * beam_width
  return memory, source_sequence_length, encoder_state, batch_size
def _build_decoder_cell(self hparams encoder_outputs encoder_state
source_sequence_length):
“““Build a RNN ce
属性 大小 日期 时间 名称
----------- --------- ---------- ----- ----
目录 0 2019-02-13 19:16 nmt-master\
文件 1455 2019-02-13 19:16 nmt-master\CONTRIBUTING.md
文件 11358 2019-02-13 19:16 nmt-master\LICENSE
文件 53850 2019-02-13 19:16 nmt-master\README.md
目录 0 2019-02-13 19:16 nmt-master\nmt\
文件 50 2019-02-13 19:16 nmt-master\nmt\.gitignore
文件 0 2019-02-13 19:16 nmt-master\nmt\__init__.py
文件 7387 2019-02-13 19:16 nmt-master\nmt\attention_model.py
目录 0 2019-02-13 19:16 nmt-master\nmt\g3doc\
目录 0 2019-02-13 19:16 nmt-master\nmt\g3doc\img\
文件 68102 2019-02-13 19:16 nmt-master\nmt\g3doc\img\attention_equation_0.jpg
文件 42298 2019-02-13 19:16 nmt-master\nmt\g3doc\img\attention_equation_1.jpg
文件 47590 2019-02-13 19:16 nmt-master\nmt\g3doc\img\attention_mechanism.jpg
文件 150076 2019-02-13 19:16 nmt-master\nmt\g3doc\img\attention_vis.jpg
文件 14737 2019-02-13 19:16 nmt-master\nmt\g3doc\img\encdec.jpg
文件 48359 2019-02-13 19:16 nmt-master\nmt\g3doc\img\greedy_dec.jpg
文件 58518 2019-02-13 19:16 nmt-master\nmt\g3doc\img\seq2seq.jpg
文件 12252 2019-02-13 19:16 nmt-master\nmt\gnmt_model.py
文件 8895 2019-02-13 19:16 nmt-master\nmt\inference.py
文件 6490 2019-02-13 19:16 nmt-master\nmt\inference_test.py
文件 33798 2019-02-13 19:16 nmt-master\nmt\model.py
文件 24395 2019-02-13 19:16 nmt-master\nmt\model_helper.py
文件 48578 2019-02-13 19:16 nmt-master\nmt\model_test.py
文件 29369 2019-02-13 19:16 nmt-master\nmt\nmt.py
文件 3404 2019-02-13 19:16 nmt-master\nmt\nmt_test.py
目录 0 2019-02-13 19:16 nmt-master\nmt\sc
文件 0 2019-02-13 19:16 nmt-master\nmt\sc
文件 4071 2019-02-13 19:16 nmt-master\nmt\sc
文件 1188 2019-02-13 19:16 nmt-master\nmt\sc
文件 10419 2019-02-13 19:16 nmt-master\nmt\sc
文件 6460 2019-02-13 19:16 nmt-master\nmt\sc
............此处省略34个文件信息
相关资源
- Python-DeepMoji模型的pyTorch实现
- Python-使用DeepFakes实现YouTube视频自动换
- Python-一系列高品质的动漫人脸数据集
- Python-Insightface人脸检测识别的最小化
- Python-自然场景文本检测PSENet的一个
- Python-在特征金字塔网络FPN的Pytorch实现
- Python-PyTorch实时多人姿态估计项目的实
- Python-用PyTorch10实现FasterRCNN和MaskRCNN比
- Python-心脏核磁共振MRI图像分割
- Python-基于YOLOv3的行人检测
- Python-RLSeq2Seq用于SequencetoSequence模型的
- Python-PyTorch对卷积CRF的参考实现
- Python-高效准确的EAST文本检测器的一个
- Python-pytorch实现的人脸检测和人脸识别
- Python-UNet用于医学图像分割的嵌套UN
- Python-TensorFlow弱监督图像分割
- Python-基于tensorflow实现的用textcnn方法
- Python-Keras实现Inceptionv4InceptionResnetv1和
- Python-pytorch中文手册
- Python-FastSCNN的PyTorch实现快速语义分割
- Python-滑动窗口高分辨率显微镜图像分
- Python-使用MovieLens数据集训练的电影推
- Python-机器学习驱动的Web应用程序防火
- Python-subpixel利用Tensorflow的一个子像素
- Python-汉字的神经风格转移Neuralstyle
- Python-神经网络模型能够从音频演讲中
- Python-深度增强学习算法的PyTorch实现策
- Python-基于深度学习的语音增强使用
- Python-基于知识图谱的红楼梦人物关系
- Python-STGAN用于图像合成的空间变换生
评论
共有 条评论