Commit 9452bcd1 by 曹润柘

update chapter1

parent 049b28a4
......@@ -492,6 +492,7 @@ His house is on the south bank of the river.
\item Joshua: Joshua\cite{Li2010Joshua} is a hierarchical phrase-based translation system developed by the Center for Language and Speech Processing at Johns Hopkins University. Because Joshua is implemented in Java, it offers good extensibility and portability when run or developed on different platforms, and it is one of the most widely used machine translation systems (a schematic hierarchical rule is sketched after this item). \url{http://joshua.sourceforge.net/Joshua/Welcome.html}
\vspace{0.5em}
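As a rough illustration of the ``hierarchical phrase-based'' terminology used for Joshua above, the lines below sketch the general shape of a synchronous context-free grammar (SCFG) rule; the concrete Chinese--English pair is an invented example, not one taken from Joshua's documentation or \cite{Li2010Joshua}.
% Sketch of a hierarchical (SCFG) translation rule of the kind extracted by
% hierarchical phrase-based systems such as Joshua. The lexical items are
% illustrative assumptions only.
\begin{displaymath}
X \;\rightarrow\; \langle\ \textrm{将}\ X_1\ \textrm{考虑在内}\ ,\ \ \textrm{take}\ X_1\ \textrm{into account}\ \rangle
\end{displaymath}
% During decoding, the non-terminal X_1 is rewritten recursively with the
% translations of smaller source spans, which lets a single rule capture
% reordering beyond contiguous phrases.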
\item SilkRoad: SilkRoad is a phrase-based statistical machine translation system jointly developed by five Chinese institutions (the Institute of Computing Technology, the Institute of Software, and the Institute of Automation of the Chinese Academy of Sciences, together with Xiamen University and Harbin Institute of Technology). It was the first open-source statistical machine translation system in China, and indeed in Asia. SilkRoad supports multiple decoders and rule extraction, and provides different experimental choices for different combinations of its sub-systems. \url{http://www.nlp.org.cn/project/project.php?projid=14}
\\{\color{red} Regarding the SilkRoad system: I searched the NLP websites of the five participating institutions and the related literature, and also asked senior labmates and 曹润柘; in total I only found the system's user manual at https://www.doc88.com/p-4174403220161.html and one article that mentions it, ``Machine Translation in China''.}
\vspace{0.5em}
\item SAMT: SAMT\cite{zollmann2007the} is a syntax-augmented statistical machine translation system developed by the machine translation group at Carnegie Mellon University. During decoding, SAMT uses target-side trees to generate translation rules without strictly following the grammar of the target language. A highlight of SAMT is that it offers a simple yet efficient way to exploit syntactic information in machine translation. Because SAMT is implemented on Hadoop, it can benefit from distributed processing of large data sets across computer clusters. \url{http://www.cs.cmu.edu/zollmann/samt/}
\vspace{0.5em}
......@@ -525,7 +526,8 @@ His house is on the south bank of the river.
\vspace{0.5em}
\item OpenNMT: OpenNMT\cite{KleinOpenNMT} is a neural machine translation system open-sourced by the natural language processing group at Harvard University and originally built on the Torch framework. Early versions of OpenNMT were written in Lua; the project has since been extended to TensorFlow and PyTorch. It is designed to be simple, easy to use, and easy to extend, while maintaining efficiency and translation accuracy. \url{https://github.com/OpenNMT/OpenNMT}
\vspace{0.5em}
\item Stanford neural machine translation open-source repository: The Stanford Natural Language Processing Group (Stanford NLP) published an article ({\color{red} article URL? reference?}) summarizing the group's research on neural machine translation, together with implementations of several translation models.\\ \url{https://nlp.stanford.edu/projects/nmt/}
\item Stanford neural machine translation open-source repository: The Stanford Natural Language Processing Group (Stanford NLP) published a tutorial introducing the group's research on neural machine translation, together with implementations of several translation models\cite{luong2016acl_hybrid}. \url{https://nlp.stanford.edu/projects/nmt/}
\\{\color{red} I also asked senior labmates about this item. The original intent was probably to cite the tutorial\\https://sites.google.com/site/acl16nmt/, so I revised the sentence based on my own understanding. In addition, the Stanford NLP website shows that of the three architectures they implemented, only two have released code repositories, and both links give the same citation, so that citation is used for now.}
\vspace{0.5em}
\item THUMT: a neural machine translation system implemented by the NLP group at Tsinghua University, supporting the Transformer and other models\cite{ZhangTHUMT}. The system is implemented mainly on TensorFlow and Theano. The Theano version includes the RNNsearch model and supports training with MLE (maximum likelihood estimation), MRT (minimum risk training), and SST (semi-supervised training); a sketch of the MRT objective is given after this item. The TensorFlow version implements three basic models: Seq2Seq, RNNsearch, and Transformer. \url{https://github.com/THUNLP-MT/THUMT}
\vspace{0.5em}
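Since MRT is listed among THUMT's training criteria above, the following is a minimal sketch of the minimum risk training objective in its commonly used form (following the formulation popularized by Shen et al.); the notation, with $S(x)$ for a sampled candidate set and $\Delta$ for a sentence-level loss such as $1-\textrm{BLEU}$, is an illustrative assumption rather than THUMT's exact formulation.
% Sketch of the MRT objective: minimize the expected loss over a set S(x) of
% candidate translations, with P renormalized over S(x). MLE, by contrast,
% maximizes \log P(y|x;\theta) on the reference translation y alone.
\begin{displaymath}
\mathcal{R}(\theta) \;=\; \sum_{(x,y)\in D}\ \sum_{y' \in S(x)} P(y' \mid x;\theta)\,\Delta(y', y)
\end{displaymath}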
......
......@@ -481,33 +481,103 @@ year={2017}}
title={Mixed-Precision Training for NLP and Speech Recognition with OpenSeq2Seq},
author={Kuchaiev, Oleksii and Ginsburg, Boris and Gitman, Igor and Lavrukhin, Vitaly and Li, Jason and Nguyen, Huyen and Case, Carl and Micikevicius, Paulius},
}
@article{bahdanau2015neural,
title={Neural Machine Translation by Jointly Learning to Align and Translate},
author={Bahdanau, Dzmitry and Cho, Kyunghyun and Bengio, Yoshua},
year={2015}}
@inproceedings{肖桐2011CWMT2011,
title={CWMT2011东北大学参评系统NiuTrans介绍(英文)},
author={肖桐 and 张浩 and 李强 and 路琦 and 朱靖波 and 任飞亮 and 王会珍},
booktitle={机器翻译研究进展——第七届全国机器翻译研讨会论文集},
year={2011},
}
@article{Li2010Joshua,
title={Joshua: An Open Source Toolkit for Parsing-based Machine Translation},
author={Li, Zhifei and Callison-Burch, Chris and Dyer, Chris and Ganitkevitch, Juri and Khudanpur, Sanjeev and Schwartz, Lane and Thornton, Wren N. G. and Weese, Jonathan and Zaidan, Omar F.},
pages={135--139},
year={2010},
}
@article{luong2016achieving,
title={Achieving Open Vocabulary Neural Machine Translation with Hybrid Word-Character Models},
author={Luong, Minh-Thang and Manning, Christopher D},
journal={arXiv: Computation and Language},
year={2016}}
@article{luong2015effective,
title={Effective Approaches to Attention-based Neural Machine Translation},
author={Luong, Minh-Thang and Pham, Hieu and Manning, Christopher D},
journal={arXiv: Computation and Language},
year={2015}}
@inproceedings{Goldberg2017Neural,
title={Neural Network Methods in Natural Language Processing},
author={Goldberg, Yoav and Hirst, Graeme},
booktitle={Neural Network Methods in Natural Language Processing},
year={2017},
}
@article{pino2010the,
title={The CUED HiFST System for the WMT10 Translation Shared Task},
author={Pino, Juan and Iglesias, Gonzalo and De Gispert, Adria and Blackwood, Graeme and Brunning, Jamie and Byrne, William},
pages={155--160},
year={2010}}
@article{see2016compression,
title={Compression of Neural Machine Translation Models via Pruning},
author={See, Abigail and Luong, Minh-Thang and Manning, Christopher D},
journal={arXiv: Artificial Intelligence},
year={2016},
}
@book{Junczysdowmunt2012SyMGiza,
title={SyMGiza++: Symmetrized Word Alignment Models for Statistical Machine Translation},
author={Junczys-Dowmunt, Marcin and Szał, Arkadiusz},
year={2012},
}
@article{VilarJane,
title={Jane: an advanced freely available hierarchical machine translation toolkit},
author={Vilar, David and Stein, Daniel and Huck, Matthias and Ney, Hermann},
journal={Machine Translation},
volume={26},
number={3},
pages={197--216},
}
@article{Cer2010Phrasal,
title={Phrasal: A Statistical Machine Translation Toolkit for Exploring New Model Features.},
author={Cer, Daniel M and Galley, Michel and Jurafsky, Daniel and Manning, Christopher D},
year={2010},
}
@article{zollmann2007the,
title={The Syntax Augmented MT (SAMT) System at the Shared Task for the 2007 ACL Workshop on Statistical Machine Translation},
author={Zollmann, Andreas and Venugopal, Ashish and Paulik, Matthias and Vogel, Stephan},
pages={216--219},
year={2007}}
@article{曼宁2005《统计自然语言处理基础》,
title={《统计自然语言处理基础》},
author={曼宁},
journal={中文信息学报},
volume={19},
number={3},
pages={54--54},
year={2005},
}
@article{zoph2016simple,
title={Simple, Fast Noise-Contrastive Estimation for Large RNN Vocabularies.},
author={Zoph, Barret and Vaswani, Ashish and May, Jonathan and Knight, Kevin},
pages={1217--1222},
year={2016}}
@article{dyer2013a,
title={A Simple, Fast, and Effective Reparameterization of IBM Model 2},
author={Dyer, Chris and Chahuneau, Victor and Smith, Noah A},
pages={644--648},
year={2013}}
@article{nmtpy2017,
author = {Ozan Caglayan and
Mercedes Garc\'{i}a-Mart\'{i}nez and
Adrien Bardet and
Walid Aransa and
Fethi Bougares and
Lo\"{i}c Barrault},
title = {NMTPY: A Flexible Toolkit for Advanced Neural Machine Translation Systems},
journal = {Prague Bull. Math. Linguistics},
volume = {109},
pages = {15--28},
year = {2017},
url = {https://ufal.mff.cuni.cz/pbml/109/art-caglayan-et-al.pdf},
doi = {10.1515/pralin-2017-0035},
timestamp = {Tue, 12 Sep 2017 10:01:08 +0100}
}
@inproceedings{luong2016acl_hybrid,
author = {Luong, Minh-Thang and Manning, Christopher D.},
title = {Achieving Open Vocabulary Neural Machine Translation with Hybrid Word-Character Models},
booktitle = {Association for Computational Linguistics (ACL)},
address = {Berlin, Germany},
month = {August},
year = {2016}
}