diff --git a/.travis.yml b/.travis.yml index 8421578fd2..56b9550da4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,13 +4,6 @@ python: - "3.6" git: depth: false -addons: - apt: - packages: - # Additional dependencies for im2text and speech2text - - libsox-dev - - libsox-fmt-all - - sox before_install: # Install CPU version of PyTorch. - if [[ $TRAVIS_PYTHON_VERSION == 3.6 ]]; then pip install torch==1.6.0 -f https://download.pytorch.org/whl/cpu/torch_stable.html; fi @@ -39,40 +32,30 @@ jobs: - name: "Unit tests" # Please also add tests to `test/pull_request_chk.sh`. script: - - wget -O /tmp/im2text.tgz http://lstm.seas.harvard.edu/latex/im2text_small.tgz; tar zxf /tmp/im2text.tgz -C /tmp/; head /tmp/im2text/src-train.txt > /tmp/im2text/src-train-head.txt; head /tmp/im2text/tgt-train.txt > /tmp/im2text/tgt-train-head.txt; head /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt; head /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt - - wget -O /tmp/speech.tgz http://lstm.seas.harvard.edu/latex/speech.tgz; tar zxf /tmp/speech.tgz -C /tmp/; head /tmp/speech/src-train.txt > /tmp/speech/src-train-head.txt; head /tmp/speech/tgt-train.txt > /tmp/speech/tgt-train-head.txt; head /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt; head /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt - - wget -O /tmp/test_model_speech.pt http://lstm.seas.harvard.edu/latex/model_step_2760.pt - - wget -O /tmp/test_model_im2text.pt http://lstm.seas.harvard.edu/latex/test_model_im2text.pt - python -m unittest discover - # test nmt preprocessing - - python preprocess.py -train_src data/src-train.txt -train_tgt data/tgt-train.txt -valid_src data/src-val.txt -valid_tgt data/tgt-val.txt -save_data /tmp/data -src_vocab_size 1000 -tgt_vocab_size 1000 && rm -rf /tmp/data*.pt - # test im2text preprocessing - - python preprocess.py -data_type img -shard_size 100 -src_dir /tmp/im2text/images -train_src /tmp/im2text/src-train.txt -train_tgt /tmp/im2text/tgt-train.txt -valid_src /tmp/im2text/src-val.txt -valid_tgt /tmp/im2text/tgt-val.txt -save_data /tmp/im2text/data && rm -rf /tmp/im2text/data*.pt - # test speech2text preprocessing - - python preprocess.py -data_type audio -shard_size 300 -src_dir /tmp/speech/an4_dataset -train_src /tmp/speech/src-train.txt -train_tgt /tmp/speech/tgt-train.txt -valid_src /tmp/speech/src-val.txt -valid_tgt /tmp/speech/tgt-val.txt -save_data /tmp/speech/data && rm -rf /tmp/speech/data*.pt + # test build vocabulary + - PYTHONPATH=$PYTHONPATH:. 
python onmt/bin/build_vocab.py -config data/data.yaml -save_data /tmp/onmt -n_sample 5000 && rm -rf /tmp/sample + + # test nmt field/transform prepare: used for tool testing + - python train.py -config data/data.yaml -save_data /tmp/onmt.train.check -dump_fields -dump_transforms -n_sample 30 -src_vocab /tmp/onmt.vocab.src -tgt_vocab /tmp/onmt.vocab.tgt -src_vocab_size 1000 -tgt_vocab_size 1000 + # test nmt training + - python train.py -config data/data.yaml -src_vocab /tmp/onmt.vocab.src -tgt_vocab /tmp/onmt.vocab.tgt -src_vocab_size 1000 -tgt_vocab_size 1000 -rnn_size 2 -batch_size 10 -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 + # test nmt training w/copy + - python train.py -config data/data.yaml -src_vocab /tmp/onmt.vocab.src -tgt_vocab /tmp/onmt.vocab.tgt -src_vocab_size 1000 -tgt_vocab_size 1000 -rnn_size 2 -batch_size 10 -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 -copy_attn + # test nmt training w/align + - python train.py -config data/align_data.yaml -src_vocab /tmp/onmt.vocab.src -tgt_vocab /tmp/onmt.vocab.tgt -src_vocab_size 1000 -tgt_vocab_size 1000 -max_generator_batches 0 -encoder_type transformer -decoder_type transformer -layers 4 -word_vec_size 16 -rnn_size 16 -heads 2 -transformer_ff 64 -lambda_align 0.05 -alignment_layer 2 -alignment_heads 0 -report_every 5 -train_steps 10 + # test Graph neural network training + - python train.py -config data/ggnn_data.yaml -src_seq_length 1000 -tgt_seq_length 30 -encoder_type ggnn -layers 2 -decoder_type rnn -rnn_size 256 -learning_rate 0.1 -learning_rate_decay 0.8 -global_attention general -batch_size 32 -word_vec_size 256 -bridge -train_steps 10 -n_edge_types 9 -state_dim 256 -n_steps 10 -n_node 64 # test nmt translation - head data/src-test.txt > /tmp/src-test.txt; python translate.py -model onmt/tests/test_model.pt -src /tmp/src-test.txt -verbose # test nmt ensemble translation - head data/src-test.txt > /tmp/src-test.txt; python translate.py -model onmt/tests/test_model.pt onmt/tests/test_model.pt -src /tmp/src-test.txt -verbose - # test im2text translation - - head /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt; head /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt; python translate.py -data_type img -src_dir /tmp/im2text/images -model /tmp/test_model_im2text.pt -src /tmp/im2text/src-val-head.txt -tgt /tmp/im2text/tgt-val-head.txt -verbose -out /tmp/im2text/trans - # test speech2text translation - - head /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt; head /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt; python translate.py -data_type audio -src_dir /tmp/speech/an4_dataset -model /tmp/test_model_speech.pt -src /tmp/speech/src-val-head.txt -tgt /tmp/speech/tgt-val-head.txt -verbose -out /tmp/speech/trans; diff /tmp/speech/tgt-val-head.txt /tmp/speech/trans - # test nmt preprocessing and training - - head -500 data/src-val.txt > /tmp/src-val.txt; head -500 data/tgt-val.txt > /tmp/tgt-val.txt; python preprocess.py -train_src /tmp/src-val.txt -train_tgt /tmp/tgt-val.txt -valid_src /tmp/src-val.txt -valid_tgt /tmp/tgt-val.txt -save_data /tmp/q -src_vocab_size 1000 -tgt_vocab_size 1000; python train.py -data /tmp/q -rnn_size 2 -batch_size 2 -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 && rm -rf /tmp/q*.pt - # test nmt preprocessing w/ sharding and training w/copy - - head -50 data/src-val.txt > /tmp/src-val.txt; head -50 data/tgt-val.txt > /tmp/tgt-val.txt; python preprocess.py -train_src /tmp/src-val.txt -train_tgt /tmp/tgt-val.txt -valid_src 
/tmp/src-val.txt -valid_tgt /tmp/tgt-val.txt -shard_size 25 -dynamic_dict -save_data /tmp/q -src_vocab_size 1000 -tgt_vocab_size 1000; python train.py -data /tmp/q -rnn_size 2 -batch_size 2 -word_vec_size 5 -report_every 5 -rnn_size 10 -copy_attn -train_steps 10 -pool_factor 10 && rm -rf /tmp/q*.pt - - # test Graph neural network preprocessing and training - - cp data/ggnnsrc.txt /tmp/src-val.txt; cp data/ggnntgt.txt /tmp/tgt-val.txt; python preprocess.py -train_src /tmp/src-val.txt -train_tgt /tmp/tgt-val.txt -valid_src /tmp/src-val.txt -valid_tgt /tmp/tgt-val.txt -src_seq_length 1000 -tgt_seq_length 30 -src_vocab data/ggnnsrcvocab.txt -tgt_vocab data/ggnntgtvocab.txt -dynamic_dict -save_data /tmp/q ; python train.py -data /tmp/q -encoder_type ggnn -layers 2 -decoder_type rnn -rnn_size 256 -learning_rate 0.1 -learning_rate_decay 0.8 -global_attention general -batch_size 32 -word_vec_size 256 -bridge -train_steps 10 -src_vocab data/ggnnsrcvocab.txt -n_edge_types 9 -state_dim 256 -n_steps 10 -n_node 64 && rm -rf /tmp/q*.pt - - # test im2text preprocessing and training - - head -50 /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt; head -50 /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt; python preprocess.py -data_type img -src_dir /tmp/im2text/images -train_src /tmp/im2text/src-val-head.txt -train_tgt /tmp/im2text/tgt-val-head.txt -valid_src /tmp/im2text/src-val-head.txt -valid_tgt /tmp/im2text/tgt-val-head.txt -save_data /tmp/im2text/q -tgt_seq_length 100; python train.py -model_type img -data /tmp/im2text/q -rnn_size 2 -batch_size 2 -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 -pool_factor 10 && rm -rf /tmp/im2text/q*.pt - # test speech2text preprocessing and training - - head -100 /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt; head -100 /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt; python preprocess.py -data_type audio -src_dir /tmp/speech/an4_dataset -train_src /tmp/speech/src-val-head.txt -train_tgt /tmp/speech/tgt-val-head.txt -valid_src /tmp/speech/src-val-head.txt -valid_tgt /tmp/speech/tgt-val-head.txt -save_data /tmp/speech/q; python train.py -model_type audio -data /tmp/speech/q -rnn_size 2 -batch_size 2 -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 -pool_factor 10 && rm -rf /tmp/speech/q*.pt - # test nmt translation - - python translate.py -model onmt/tests/test_model2.pt -src data/morph/src.valid -verbose -batch_size 10 -beam_size 10 -tgt data/morph/tgt.valid -out /tmp/trans; diff data/morph/tgt.valid /tmp/trans + # test nmt translation with beam search + - python translate.py -model onmt/tests/test_model2.pt -src data/morph/src.valid -verbose -batch_size 10 -beam_size 10 -tgt data/morph/tgt.valid -out /tmp/trans; diff data/morph/tgt.valid /tmp/trans && rm /tmp/trans # test nmt translation with random sampling - - python translate.py -model onmt/tests/test_model2.pt -src data/morph/src.valid -verbose -batch_size 10 -beam_size 1 -seed 1 -random_sampling_topk "-1" -random_sampling_temp 0.0001 -tgt data/morph/tgt.valid -out /tmp/trans; diff data/morph/tgt.valid /tmp/trans + - python translate.py -model onmt/tests/test_model2.pt -src data/morph/src.valid -verbose -batch_size 10 -beam_size 1 -seed 1 -random_sampling_topk "-1" -random_sampling_temp 0.0001 -tgt data/morph/tgt.valid -out /tmp/trans; diff data/morph/tgt.valid /tmp/trans && rm /tmp/trans + # test tool + - PYTHONPATH=$PYTHONPATH:. 
python tools/extract_vocabulary.py -file /tmp/onmt.train.check.vocab.pt -file_type field -side src -out_file /tmp/onmt.vocab.txt; if ! wc -l /tmp/onmt.vocab.txt | grep -qF "1002"; then echo "wrong word count" && exit 1; else echo "create vocabulary pass"; fi + - PYTHONPATH=$PYTHONPATH:. python tools/embeddings_to_torch.py -emb_file_enc onmt/tests/sample_glove.txt -emb_file_dec onmt/tests/sample_glove.txt -dict_file /tmp/onmt.train.check.vocab.pt -output_file /tmp/q_gloveembeddings && rm /tmp/q_gloveembeddings; rm /tmp/onmt.train.check.*.pt - PYTHONPATH=$PYTHONPATH:. python tools/extract_embeddings.py -model onmt/tests/test_model.pt diff --git a/README.md b/README.md index 0367199668..eb5d2d7dea 100644 --- a/README.md +++ b/README.md @@ -2,31 +2,74 @@ [![Build Status](https://travis-ci.org/OpenNMT/OpenNMT-py.svg?branch=master)](https://travis-ci.org/OpenNMT/OpenNMT-py) [![Run on FH](https://img.shields.io/badge/Run%20on-FloydHub-blue.svg)](https://floydhub.com/run?template=https://github.com/OpenNMT/OpenNMT-py) +[![Documentation](https://img.shields.io/badge/docs-latest-blue.svg)](https://opennmt.net/OpenNMT-py/) +[![Gitter](https://badges.gitter.im/OpenNMT/OpenNMT-py.svg)](https://gitter.im/OpenNMT/OpenNMT-py?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) +[![Forum](https://img.shields.io/badge/discourse-online-green.svg)](https://forum.opennmt.net/) -This is a [PyTorch](https://github.com/pytorch/pytorch) port of [OpenNMT](https://github.com/OpenNMT/OpenNMT), an open-source (MIT) neural machine translation system. It is designed to be research friendly to try out new ideas in translation, summary, image-to-text, morphology, and many other domains. Some companies have proven the code to be production ready. +This is the [PyTorch](https://github.com/pytorch/pytorch) version of the [OpenNMT](https://opennmt.net) project, an open-source (MIT) neural machine translation framework. It is designed to be research friendly to try out new ideas in translation, summarization, morphology, and many other domains. Some companies have proven the code to be production ready. We love contributions. Please consult the Issues page for any [Contributions Welcome](https://github.com/OpenNMT/OpenNMT-py/issues?q=is%3Aissue+is%3Aopen+label%3A%22contributions+welcome%22) tagged post. - Before raising an issue, make sure you read the requirements and the documentation examples. Unless there is a bug, please use the [Forum](http://forum.opennmt.net) or [Gitter](https://gitter.im/OpenNMT/OpenNMT-py) to ask questions. +---- + +# Announcement - OpenNMT-py 2.0 + +**We're happy to announce the upcoming release v2.0 of OpenNMT-py.** + +The major idea behind this release is the -- almost -- complete **makeover of the data loading pipeline**. A new 'dynamic' paradigm is introduced, allowing on-the-fly transforms to be applied to the data. + +This has a few advantages, including: + +- removing or drastically reducing the preprocessing required to train a model; +- increasing the possibilities of data augmentation and manipulation through on-the-fly transforms. + +These transforms can be specific tokenization methods, filters, noising, or any custom transform users may want to implement. Custom transform implementation is quite straightforward thanks to the existing base class and example implementations. + +You can check out how to use this new data loading pipeline in the updated [docs](https://opennmt.net/OpenNMT-py). + +All the readily available transforms are described [here](https://opennmt.net/OpenNMT-py/FAQ.html#what-are-the-readily-available-on-the-fly-data-transforms).
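To make this concrete, here is a minimal sketch of what declaring an on-the-fly transform in the data configuration could look like. The `filtertoolong` transform and its `src_seq_length`/`tgt_seq_length` options are among those listed in the FAQ linked above; treat the exact layout as illustrative rather than authoritative:

```yaml
# Illustrative sketch: one corpus with a filtering transform applied on the fly.
data:
    corpus_1:
        path_src: toy-ende/src-train.txt
        path_tgt: toy-ende/tgt-train.txt
        # drop examples exceeding the configured lengths at iteration time
        transforms: [filtertoolong]

# options read by the filtertoolong transform
src_seq_length: 200
tgt_seq_length: 200
```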
+ +### Performance + +Given sufficient CPU resources relative to GPU computing power, most of the transforms should not slow the training down. (Note: for now, one producer process per GPU is spawned -- meaning you would ideally need 2N CPU threads for N GPUs). + +### Breaking changes + +A few features are dropped, at least for now, as the new data loading paradigm does not yet support them: + +- audio, image and video inputs; +- source word features. + +For users who still need these features, the previous codebase will be retained as `legacy` in a separate branch. It will no longer receive extensive development from the core team but PRs may still be accepted. + +Feel free to check it out and let us know what you think of the new paradigm! + +---- + Table of Contents ================= * [Full Documentation](http://opennmt.net/OpenNMT-py/) - * [Requirements](#requirements) + * [Setup](#setup) * [Features](#features) * [Quickstart](#quickstart) * [Run on FloydHub](#run-on-floydhub) * [Acknowledgements](#acknowledgements) * [Citation](#citation) -## Requirements +## Setup + +OpenNMT-py requires: + +- Python >= 3.5 +- PyTorch == 1.6.0 Install `OpenNMT-py` from `pip`: ```bash @@ -42,47 +85,44 @@ python setup.py install Note: If you have a MemoryError during the install, try to use `pip` with `--no-cache-dir`. -*(Optional)* some advanced features (e.g. working audio, image or pretrained models) requires extra packages, you can install it with: +*(Optional)* Some advanced features (e.g. pretrained models or specific transforms) require extra packages; you can install them with: + ```bash pip install -r requirements.opt.txt ``` -Note: - -- some features require Python 3.5 and after (eg: Distributed multigpu, entmax) -- we currently only support PyTorch 1.4 ## Features +- :warning: **New in OpenNMT-py 2.0**: [On-the-fly data processing](https://opennmt.net/OpenNMT-py/FAQ.html#what-are-the-readily-available-on-the-fly-data-transforms) + - [Seq2Seq models (encoder-decoder) with multiple RNN cells (lstm/gru) and attention (dotprod/mlp) types](http://opennmt.net/OpenNMT-py/options/train.html#model-encoder-decoder) - [Transformer models](http://opennmt.net/OpenNMT-py/FAQ.html#how-do-i-use-the-transformer-model) - [Copy and Coverage Attention](http://opennmt.net/OpenNMT-py/options/train.html#model-attention) - [Pretrained Embeddings](http://opennmt.net/OpenNMT-py/FAQ.html#how-do-i-use-pretrained-embeddings-e-g-glove) - [Source word features](http://opennmt.net/OpenNMT-py/options/train.html#model-embeddings) -- [Image-to-text processing](http://opennmt.net/OpenNMT-py/im2text.html) -- [Speech-to-text processing](http://opennmt.net/OpenNMT-py/speech2text.html) - [TensorBoard logging](http://opennmt.net/OpenNMT-py/options/train.html#logging) - [Multi-GPU training](http://opennmt.net/OpenNMT-py/FAQ.html##do-you-support-multi-gpu) - [Data preprocessing](http://opennmt.net/OpenNMT-py/options/preprocess.html) - [Inference (translation) with batching and beam search](http://opennmt.net/OpenNMT-py/options/translate.html) -- Inference time loss functions. -- [Conv2Conv convolution model] -- SRU "RNNs faster than CNN" paper +- Inference time loss functions +- [Conv2Conv convolution model](https://arxiv.org/abs/1705.03122) +- SRU "RNNs faster than CNN" [paper](https://arxiv.org/abs/1709.02755) - Mixed-precision training with [APEX](https://github.com/NVIDIA/apex), optimized on [Tensor Cores](https://developer.nvidia.com/tensor-cores) ## Quickstart [Full Documentation](http://opennmt.net/OpenNMT-py/) ### Step 1: Prepare the data -### Step 1: Preprocess the data +To get started, we suggest downloading a toy English-German dataset for machine translation, containing 10k tokenized sentences: ```bash -onmt_preprocess -train_src data/src-train.txt -train_tgt data/tgt-train.txt -valid_src data/src-val.txt -valid_tgt data/tgt-val.txt -save_data data/demo +wget https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz +tar xf toy-ende.tar.gz ``` -We will be working with some example data in `data/` folder. - The data consists of parallel source (`src`) and target (`tgt`) data containing one sentence per line with tokens separated by a space: * `src-train.txt` @@ -90,42 +130,97 @@ The data consists of parallel source (`src`) and target (`tgt`) data containing * `tgt-train.txt` * `src-val.txt` * `tgt-val.txt` -Validation files are required and used to evaluate the convergence of the training. It usually contains no more than 5000 sentences. +Validation files are used to evaluate the convergence of the training. They usually contain no more than 5k sentences. + +```text +$ head -n 3 toy-ende/src-train.txt +It is not acceptable that , with the help of the national bureaucracies , Parliament 's legislative prerogative should be made null and void by means of implementing provisions whose content , purpose and extent are not laid down in advance . +Federal Master Trainer and Senior Instructor of the Italian Federation of Aerobic Fitness , Group Fitness , Postural Gym , Stretching and Pilates; from 2004 , he has been collaborating with Antiche Terme as personal Trainer and Instructor of Stretching , Pilates and Postural Gym . +" Two soldiers came up to me and told me that if I refuse to sleep with them , they will kill me . They beat me and ripped my clothes . +``` + +We need to build a **YAML configuration file** to specify the data that will be used: + +```yaml +# toy_en_de.yaml +## Where the vocab(s) will be written +save_data: toy-ende/run/example +# Prevent overwriting existing files in the folder +overwrite: False -After running the preprocessing, the following files are generated: +# Corpus opts: +data: + corpus_1: + path_src: toy-ende/src-train.txt + path_tgt: toy-ende/tgt-train.txt + valid: + path_src: toy-ende/src-val.txt + path_tgt: toy-ende/tgt-val.txt +... -* `demo.train.pt`: serialized PyTorch file containing training data -* `demo.valid.pt`: serialized PyTorch file containing validation data -* `demo.vocab.pt`: serialized PyTorch file containing vocabulary data +``` + +From this configuration, we can build the vocab(s) that will be necessary to train the model: +```bash +onmt_build_vocab -config toy_en_de.yaml -n_sample 10000 +``` +**Notes**: +- `-n_sample` is required here -- it represents the number of lines sampled from each corpus to build the vocab. +- This configuration is the simplest possible, without any tokenization or other *transforms* (a sketch of what adding one could look like follows below). See [other example configurations](https://github.com/OpenNMT/OpenNMT-py/tree/master/config) for more complex pipelines. -Internally the system never touches the words themselves, but uses these indices.
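As referenced in the note above, here is a sketch of the same corpus declaration with a tokenization transform added. The `sentencepiece` transform and the `src_subword_model`/`tgt_subword_model` options come from the on-the-fly transforms FAQ; the subword model paths are placeholders (the toy dataset does not ship one), so this block is illustrative only:

```yaml
# Illustrative only: tokenize the corpus on the fly with SentencePiece.
data:
    corpus_1:
        path_src: toy-ende/src-train.txt
        path_tgt: toy-ende/tgt-train.txt
        transforms: [sentencepiece]

# placeholder paths -- a SentencePiece model must be trained beforehand
src_subword_model: toy-ende/spm.model
tgt_subword_model: toy-ende/spm.model
```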
### Step 2: Train the model +To train a model, we need to **add the following to the YAML configuration file**: +- the vocabulary path(s) that will be used; +- training-specific parameters. + +```yaml +# toy_en_de.yaml + +... + +# Vocabulary files that were just created +src_vocab: toy-ende/run/example.vocab.src +tgt_vocab: toy-ende/run/example.vocab.tgt + +# Train on a single GPU +world_size: 1 +gpu_ranks: [0] + +# Where to save the checkpoints +save_model: toy-ende/run/model +save_checkpoint_steps: 500 +train_steps: 1000 +valid_steps: 500 + +``` + +Then you can simply run: + +```bash -onmt_train -data data/demo -save_model demo-model +onmt_train -config toy_en_de.yaml ``` -The main train command is quite simple. Minimally it takes a data file -and a save file. This will run the default model, which consists of a -2-layer LSTM with 500 hidden units on both the encoder/decoder. -If you want to train on GPU, you need to set, as an example: -`CUDA_VISIBLE_DEVICES=1,3` -`-world_size 2 -gpu_ranks 0 1` to use (say) GPU 1 and 3 on this node only. -To know more about distributed training on single or multi nodes, read the FAQ section. +This configuration will run the default model, which consists of a 2-layer LSTM with 500 hidden units on both the encoder and decoder. It will run on a single GPU (`world_size 1` & `gpu_ranks [0]`).
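To train on several GPUs instead, the same pair of options scales up. A minimal sketch, based on the multi-GPU example this section previously gave as command-line flags (`CUDA_VISIBLE_DEVICES=1,3` with `-world_size 2 -gpu_ranks 0 1` to use GPUs 1 and 3 on the node):

```yaml
# Train on two GPUs, e.g. with CUDA_VISIBLE_DEVICES=1,3 exported beforehand
world_size: 2
gpu_ranks: [0, 1]
```

To know more about distributed training on single or multiple nodes, read the FAQ section.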
+ +Before the training process actually starts, the `*.vocab.pt` and `*.transforms.pt` files will be dumped to `-save_data`, following the configuration specified in the `-config` YAML file. We'll also generate transformed samples to simplify any potentially required visual inspection. The number of sample lines to dump per corpus is set with the `-n_sample` flag. + +For more advanced models and parameters, see [other example configurations](https://github.com/OpenNMT/OpenNMT-py/tree/master/config) or the [FAQ](https://opennmt.net/OpenNMT-py/FAQ.html). ### Step 3: Translate ```bash -onmt_translate -model demo-model_acc_XX.XX_ppl_XXX.XX_eX.pt -src data/src-test.txt -output pred.txt -replace_unk -verbose +onmt_translate -model toy-ende/run/model_step_1000.pt -src toy-ende/src-test.txt -output toy-ende/pred_1000.txt -gpu 0 -verbose ``` -Now you have a model which you can use to predict on new data. We do this by running beam search. This will output predictions into `pred.txt`. +Now you have a model which you can use to predict on new data. 
We do this by running beam search. This will output predictions into `pred.txt`. +Now you have a model which you can use to predict on new data. We do this by running beam search. This will output predictions into `toy-ende/pred_1000.txt`. -!!! note "Note" - The predictions are going to be quite terrible, as the demo dataset is small. Try running on some larger datasets! For example you can download millions of parallel sentences for [translation](http://www.statmt.org/wmt16/translation-task.html) or [summarization](https://github.com/harvardnlp/sent-summary). +**Note**: + +The predictions are going to be quite terrible, as the demo dataset is small. Try running on some larger datasets! For example you can download millions of parallel sentences for [translation](http://www.statmt.org/wmt16/translation-task.html) or [summarization](https://github.com/harvardnlp/sent-summary). ## Alternative: Run on FloydHub @@ -158,8 +253,9 @@ Major contributors are: [Guillaume Klein](https://github.com/guillaumekln) (Systran) [Paul Tardy](https://github.com/pltrdy) (Ubiqus / Lium) [François Hernandez](https://github.com/francoishernandez) (Ubiqus) +[Linxiao Zeng](https://github.com/Zenglinxiao) (Ubiqus) [Jianyu Zhan](http://github.com/jianyuzhan) (Shanghai) -[Dylan Flaute](http://github.com/flauted (University of Dayton) +[Dylan Flaute](http://github.com/flauted) (University of Dayton) and more ! OpenNMT-py belongs to the OpenNMT project along with OpenNMT-Lua and OpenNMT-tf. @@ -183,4 +279,4 @@ OpenNMT-py belongs to the OpenNMT project along with OpenNMT-Lua and OpenNMT-tf. url = {https://doi.org/10.18653/v1/P17-4012}, doi = {10.18653/v1/P17-4012} } -``` +``` \ No newline at end of file diff --git a/data/align_data.yaml b/data/align_data.yaml new file mode 100644 index 0000000000..310082da57 --- /dev/null +++ b/data/align_data.yaml @@ -0,0 +1,6 @@ +# Corpus opts: +data: + corpus_align: + path_src: data/src-val.txt + path_tgt: data/tgt-val.txt + path_align: data/val.src-tgt.talp diff --git a/data/data.yaml b/data/data.yaml new file mode 100644 index 0000000000..065b1c0d71 --- /dev/null +++ b/data/data.yaml @@ -0,0 +1,8 @@ +# Corpus opts: +data: + corpus_1: + path_src: data/src-train.txt + path_tgt: data/tgt-train.txt + valid: + path_src: data/src-val.txt + path_tgt: data/tgt-val.txt diff --git a/data/ggnn_data.yaml b/data/ggnn_data.yaml new file mode 100644 index 0000000000..76dfc2599d --- /dev/null +++ b/data/ggnn_data.yaml @@ -0,0 +1,7 @@ +# Corpus opts: +src_vocab: data/ggnnsrcvocab.txt +tgt_vocab: data/ggnntgtvocab.txt +data: + ggnn: + path_src: data/ggnnsrc.txt + path_tgt: data/ggnntgt.txt diff --git a/data/morph/tgt.valid b/data/morph/tgt.valid index 2428d11bb1..3c21febbf3 100644 --- a/data/morph/tgt.valid +++ b/data/morph/tgt.valid @@ -71,7 +71,7 @@ o n t o ɡ e n e z a p r i m j e n a k r e k e t a e r o d r o m -o s l o b a d z a t i +o s l o b a d i t i s t u d i j k r a t k o ʋ i d l u p e ʃ t ʋ o @@ -135,7 +135,7 @@ x a n p r i p o ʋ e d a t ʃ k i n o l o ɡ i j a a s t r o n o m i j s k i -n e i z ʎ e t ʃ i ʋ o s t +n e i z l j e t ʃ i ʋ o s t u s l o ʋ a n s r p s k i e ʋ o l u t s i o n i z a m diff --git a/data/src-train.txt b/data/src-train.txt index 13719e0549..069afde8fe 100644 --- a/data/src-train.txt +++ b/data/src-train.txt @@ -2,7 +2,7 @@ It is not acceptable that , with the help of the national bureaucracies , Parlia Federal Master Trainer and Senior Instructor of the Italian Federation of Aerobic Fitness , Group Fitness , Postural Gym , Stretching and Pilates; from 2004 , he has 
been collaborating with Antiche Terme as personal Trainer and Instructor of Stretching , Pilates and Postural Gym . " Two soldiers came up to me and told me that if I refuse to sleep with them , they will kill me . They beat me and ripped my clothes . Yes , we also say that the European budget is not about the duplication of national budgets , but about delivering common goals beyond the capacity of nation states where European funds can realise economies of scale or create synergies . -The name of this site , and program name Title purchased will not be displayed . + They would be abiding by the principle of the UN , which precludes military action except in self-defence , which does not apply here . rapporteur . - (FR) Mr President , representatives of the Council and the Commission , ladies and gentlemen , I should like to begin by thanking my colleagues , who entrusted me with this report , and the shadow rapporteur for their respective contributions . Shortly thereafter , Mårthen Cedergran , who had been responsible for vocals , left Bombshell Rocks to establish himself as a tattoo artist . diff --git a/data/val.src-tgt.talp b/data/val.src-tgt.talp new file mode 100644 index 0000000000..79ea389748 --- /dev/null +++ b/data/val.src-tgt.talp @@ -0,0 +1,3000 @@ +0-0 0-1 1-2 3-3 4-4 4-5 5-6 6-7 +1-0 0-1 4-2 6-3 2-4 8-5 16-6 6-7 7-8 7-9 19-10 12-11 11-12 16-13 15-14 18-15 18-16 23-17 21-18 27-20 23-21 28-22 28-23 29-24 32-25 31-26 31-27 33-28 +0-0 1-1 2-2 2-3 5-4 2-5 5-6 6-7 7-8 8-9 10-10 3-11 10-12 11-14 13-15 13-17 14-18 17-19 15-20 19-21 17-22 21-23 20-24 21-25 22-26 23-27 24-28 25-29 26-30 26-31 27-32 +3-0 4-1 9-2 1-3 11-4 5-6 9-7 6-8 10-9 10-10 13-11 24-12 11-13 13-14 13-15 17-16 20-17 17-18 7-19 8-20 22-21 22-22 25-23 25-24 +0-0 1-1 2-2 4-3 3-4 5-5 6-6 6-7 14-8 8-9 8-10 9-11 11-12 12-13 7-14 7-15 12-16 15-17 15-18 +0-0 1-1 2-2 4-3 3-4 5-5 6-6 7-7 8-8 9-9 11-10 18-11 11-12 13-13 15-14 15-15 17-17 19-18 21-19 19-20 21-21 23-22 23-23 24-24 +0-0 2-1 2-2 4-3 5-4 6-5 9-6 9-7 12-8 14-9 17-10 20-11 19-12 14-13 6-14 15-15 19-16 22-18 21-19 22-20 24-21 25-22 25-23 23-24 29-25 +0-0 1-1 1-2 2-3 3-4 4-5 4-6 5-7 6-8 +0-0 2-1 +0-0 1-1 2-2 5-3 5-4 7-5 5-6 8-7 8-8 10-9 12-10 13-11 14-12 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 8-7 9-8 10-9 10-10 14-11 12-12 13-13 17-14 15-15 16-16 18-17 19-18 +0-0 1-1 2-2 2-3 3-4 4-5 3-6 6-7 7-8 +0-0 1-1 1-2 4-3 4-4 3-5 4-6 6-7 7-8 8-9 8-10 12-11 10-12 10-13 11-14 11-15 13-16 15-17 18-19 19-20 17-21 18-22 19-23 21-24 +0-0 1-1 2-2 2-3 3-4 5-5 6-6 7-7 6-8 8-9 7-10 8-11 15-12 10-13 12-14 13-15 15-16 16-17 12-18 18-19 16-20 19-21 +0-0 1-1 2-2 6-3 7-4 5-5 7-6 8-7 11-8 10-9 12-10 13-11 14-12 14-13 17-14 18-15 19-16 +0-0 0-1 2-2 2-3 3-4 4-5 4-6 4-7 6-8 7-9 7-10 8-11 7-12 9-13 9-14 10-15 10-16 12-17 +0-0 2-1 3-2 3-3 4-4 5-5 6-6 8-7 8-8 9-9 10-10 10-11 13-12 14-13 15-14 15-15 16-16 +0-0 2-1 3-2 0-3 1-4 4-5 5-6 5-7 7-8 8-10 10-11 10-12 9-13 10-16 13-17 11-18 15-19 +0-0 1-1 2-2 3-3 4-4 4-6 13-7 8-8 14-9 11-10 18-11 16-13 17-14 13-15 18-16 9-17 11-18 12-19 21-20 25-21 24-22 24-23 28-25 29-26 +0-0 1-1 4-2 3-3 3-4 6-5 7-6 7-7 6-8 8-9 9-10 10-11 13-12 13-13 11-14 15-15 12-16 15-17 16-18 +7-0 6-1 2-2 2-3 1-5 4-6 4-7 7-8 10-9 4-10 10-11 11-12 8-13 11-14 14-15 16-16 15-17 20-18 5-19 17-20 20-21 21-22 16-23 22-24 23-25 24-26 13-27 27-28 26-29 25-30 31-32 30-33 30-34 32-35 33-37 +2-0 1-1 5-2 6-3 8-5 9-6 14-7 11-8 14-9 15-10 +0-0 1-1 2-3 3-4 4-5 5-7 7-8 14-10 14-11 7-12 9-13 10-14 11-15 13-16 14-17 15-18 14-19 17-20 14-21 16-22 15-23 19-24 20-25 21-26 22-27 22-29 24-30 26-31 27-32 27-34 +0-0 2-1 0-2 
7-3 3-4 4-5 5-6 6-7 7-8 8-9 8-10 10-11 10-12 11-13 15-14 15-15 13-16 14-17 16-18 16-19 17-20 +1-0 5-1 2-2 2-3 5-4 7-5 7-6 9-8 11-9 13-10 11-11 13-12 14-13 +0-0 1-1 2-2 3-3 0-5 11-6 10-7 11-8 12-9 15-10 18-11 19-12 +2-0 2-1 4-3 6-5 10-7 13-8 11-9 19-12 17-13 16-15 20-16 22-17 23-18 24-19 25-20 +0-0 2-1 4-2 4-3 7-4 9-5 9-6 12-7 +1-0 1-1 3-2 4-3 +0-0 1-1 3-2 3-3 5-4 8-5 10-6 9-7 12-8 13-9 +1-0 1-1 2-2 3-3 3-4 5-5 5-6 5-7 8-8 +0-0 0-1 0-2 2-3 3-4 2-5 4-6 4-7 5-8 4-9 5-10 6-11 6-12 7-13 +0-0 4-1 5-2 1-3 3-4 1-5 5-6 6-7 8-8 7-9 8-10 9-11 11-12 11-13 12-14 +0-0 2-1 2-2 7-3 5-4 6-5 6-6 8-7 +1-0 0-1 0-2 3-3 6-4 12-5 8-6 7-7 7-8 8-9 12-10 13-11 14-12 13-13 16-14 18-15 19-16 17-17 17-18 16-19 20-20 +1-1 2-2 0-3 4-4 5-5 4-6 8-7 8-8 9-9 10-10 13-11 13-12 12-13 14-14 15-15 16-16 17-17 18-18 20-19 19-20 20-21 21-22 23-23 24-24 25-25 +2-0 2-1 0-2 2-3 4-4 7-5 8-6 7-7 9-8 12-9 10-10 12-11 13-12 14-13 +0-0 1-1 3-2 2-3 4-4 7-5 5-8 6-9 8-10 9-11 9-12 10-13 12-15 6-16 20-17 13-19 14-21 19-23 21-24 17-25 21-26 22-28 21-29 25-30 23-31 25-32 27-33 19-34 27-35 30-36 30-37 32-38 33-39 32-40 29-42 35-43 +0-0 8-1 9-2 4-3 5-4 5-5 4-6 5-7 10-8 12-9 12-10 12-11 13-12 12-13 12-14 16-15 +9-0 2-1 2-2 2-3 2-4 6-5 7-6 7-7 10-8 15-9 13-10 8-11 13-12 17-14 15-16 20-17 19-18 20-19 22-20 24-21 25-22 26-23 27-24 +0-0 1-1 2-2 3-3 3-4 4-5 6-6 5-7 6-8 7-9 8-10 +0-0 0-1 1-2 +0-0 7-1 8-2 4-3 4-4 5-5 4-6 5-7 9-8 11-9 11-10 11-11 12-12 11-13 11-14 16-15 9-16 11-17 21-18 18-19 5-20 21-21 17-22 18-23 22-24 +0-0 3-1 2-2 4-3 5-4 7-5 9-6 8-7 13-8 14-9 10-10 9-11 10-12 16-13 17-14 17-15 18-16 +5-1 10-3 2-4 5-5 7-6 14-7 9-8 9-9 6-10 11-11 1-12 11-13 3-14 18-15 19-16 20-17 21-18 18-19 21-20 18-21 22-22 23-23 25-24 26-25 28-26 26-27 27-28 30-29 31-30 31-31 32-32 +0-0 1-1 2-2 3-3 3-4 5-5 5-6 5-7 6-8 8-9 8-10 9-11 +0-0 1-1 1-2 3-3 4-4 5-5 4-6 5-7 7-8 7-9 7-10 8-11 +0-0 0-1 1-2 3-3 4-4 5-5 6-6 8-7 8-9 9-10 11-11 11-12 12-13 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 8-8 7-9 11-10 10-11 10-12 13-13 15-14 13-15 15-16 16-17 +1-0 13-1 6-2 15-3 19-4 9-6 21-7 10-8 10-9 12-10 11-11 14-12 14-13 5-14 18-15 8-17 22-18 23-19 +0-0 4-1 1-2 2-3 5-4 5-5 7-6 6-7 8-8 9-9 8-10 10-11 11-12 12-13 13-14 14-15 +0-0 0-1 1-2 3-3 3-4 3-5 +0-0 1-1 1-2 2-3 4-4 5-5 6-6 7-7 7-8 6-9 11-10 10-11 9-12 10-13 +0-0 2-1 2-2 4-3 5-4 6-5 6-6 6-7 9-8 9-9 10-10 +0-0 0-1 4-2 0-3 5-4 5-5 6-6 5-7 7-8 8-9 9-10 10-11 11-12 11-13 13-14 15-16 15-17 16-18 17-19 +1-0 3-1 3-3 7-5 7-6 5-7 6-8 9-9 11-10 12-11 12-12 14-13 16-14 17-15 20-16 21-17 22-18 22-19 24-21 25-22 21-23 27-24 28-25 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 7-7 9-8 +0-0 2-1 3-2 4-3 5-4 6-5 8-6 9-7 11-8 11-9 14-10 15-11 16-12 17-13 +1-0 0-1 1-2 2-3 3-4 5-5 7-6 9-7 7-8 8-9 10-11 9-12 16-13 10-14 10-15 18-16 15-17 17-18 15-19 18-20 21-21 24-22 25-23 27-25 22-26 23-27 27-28 28-32 32-33 32-34 34-35 34-36 35-37 +0-0 0-1 2-3 3-4 2-5 1-6 5-7 6-8 6-9 7-10 8-11 11-12 8-13 13-14 10-15 12-16 12-17 14-18 14-19 15-20 +0-0 2-1 4-2 5-3 0-5 1-6 8-7 11-8 11-9 12-10 14-11 14-12 15-13 +0-0 7-1 3-2 3-3 2-4 3-5 13-6 7-7 10-8 11-9 17-11 18-12 16-14 15-15 15-16 24-18 19-19 19-20 24-24 25-25 24-27 31-30 32-31 30-32 31-33 33-34 34-35 +0-0 1-1 2-2 3-3 4-4 7-5 6-6 9-7 9-8 9-9 10-10 +0-0 1-1 4-2 6-3 12-4 10-6 10-7 14-8 13-9 17-10 15-11 19-12 21-13 +2-0 1-1 2-2 9-3 15-4 5-5 9-6 7-7 9-8 10-9 8-10 12-11 13-12 13-13 18-14 20-16 21-17 22-18 25-19 26-20 27-21 28-22 28-23 28-24 29-25 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 5-7 8-8 8-9 9-10 10-11 11-12 +0-0 1-1 2-2 3-3 4-4 5-5 +0-0 1-1 2-2 4-3 4-4 6-5 6-6 8-7 7-8 8-9 5-10 12-11 13-12 14-13 15-14 +1-0 1-1 1-2 4-3 4-4 5-5 6-6 7-7 10-8 8-9 8-10 9-11 
13-12 14-13 15-14 16-15 17-16 17-17 18-18 +4-1 2-2 1-3 6-4 8-5 8-6 13-8 12-9 16-10 14-11 22-12 23-13 24-14 22-15 18-16 24-17 25-18 +1-0 1-1 6-2 8-3 2-4 3-5 4-6 4-7 1-8 9-9 8-10 8-11 9-12 7-13 11-14 13-15 12-16 13-17 10-18 12-19 13-20 14-21 13-23 16-25 17-26 17-27 18-28 +0-0 4-1 3-2 4-3 7-4 3-5 3-6 3-7 7-8 7-9 7-10 7-11 9-12 9-13 10-14 11-15 9-16 13-17 14-18 14-19 15-20 16-21 17-22 12-23 8-24 18-25 +0-0 6-1 2-2 3-3 5-4 7-5 3-8 10-9 12-10 11-11 19-12 20-13 16-14 17-15 15-16 15-17 24-19 25-20 27-21 20-22 21-23 27-24 28-25 +0-0 1-1 0-2 3-3 4-4 6-5 5-6 1-7 9-8 8-9 10-10 11-11 +4-0 2-1 1-2 6-3 8-4 8-5 11-6 14-8 18-9 17-10 21-11 19-12 27-13 28-14 29-15 27-16 23-17 29-18 30-19 +0-0 1-1 2-2 4-3 2-4 12-6 8-7 5-8 13-9 14-10 13-11 13-12 17-13 15-14 17-15 22-16 20-17 24-18 25-19 26-20 28-21 31-22 28-23 28-24 30-25 32-26 31-27 37-28 38-29 36-30 38-31 41-32 42-33 +0-0 1-1 1-2 2-3 4-4 4-5 6-6 6-7 7-8 8-9 8-10 8-11 8-12 +0-0 0-1 1-2 2-3 3-4 4-5 3-6 6-7 5-8 8-9 9-10 9-11 9-12 16-13 11-14 6-16 14-17 18-18 14-19 7-20 16-21 14-22 18-23 20-24 18-25 19-26 21-27 +0-0 2-1 2-2 5-3 4-4 6-5 8-6 8-7 10-8 11-9 12-10 13-11 14-12 15-13 +0-0 0-1 1-2 1-3 2-4 3-5 4-6 6-7 7-8 5-9 7-10 21-11 7-12 12-14 11-15 13-16 15-17 13-18 15-20 14-21 10-22 16-23 20-24 11-25 15-26 21-27 20-29 22-30 +2-0 5-1 3-2 3-3 14-4 6-5 4-6 6-7 6-8 7-9 9-10 10-11 11-12 12-13 17-14 15-15 17-16 19-17 17-18 20-19 23-21 21-22 22-23 25-24 +0-0 0-1 1-2 3-3 2-4 6-5 4-6 5-7 11-8 9-9 10-10 10-11 13-12 12-13 15-14 20-15 21-16 22-17 19-18 24-19 21-20 21-21 24-23 25-24 +1-0 3-1 2-2 3-3 4-4 0-5 10-6 8-7 8-8 7-9 10-11 13-12 7-13 8-14 0-15 17-17 18-18 21-19 20-20 15-21 23-22 24-23 25-24 22-25 29-27 29-28 30-30 31-31 38-32 24-33 37-34 37-35 41-36 33-37 34-38 39-39 36-40 32-41 42-42 +1-0 3-1 5-2 6-3 +0-0 1-1 0-2 3-3 9-4 4-5 6-6 7-7 11-8 8-9 10-10 15-13 16-14 19-15 20-16 17-17 18-18 21-20 22-21 23-22 +0-0 1-1 0-2 2-3 3-4 4-5 5-6 6-7 7-8 7-9 8-10 11-11 10-12 7-13 13-14 13-15 13-16 13-17 15-18 16-19 20-20 19-22 21-23 12-24 22-25 22-26 +0-0 2-1 2-2 3-3 4-4 9-5 9-6 10-7 10-8 11-9 14-10 15-11 14-12 17-13 18-14 18-15 19-16 +0-0 0-1 2-2 0-3 5-4 7-6 17-8 8-10 9-11 10-13 14-14 10-15 12-16 12-17 13-18 14-19 15-20 16-21 19-22 20-23 20-24 19-25 19-26 17-27 22-28 +0-0 3-1 7-2 5-3 6-4 6-5 6-6 9-7 2-8 2-9 9-11 12-12 15-13 15-14 16-15 17-16 18-17 19-18 23-19 22-20 13-21 21-22 22-23 27-24 27-25 27-26 32-27 33-28 29-29 31-30 30-31 39-32 37-33 32-34 36-35 36-36 40-37 +0-0 1-1 2-2 3-3 3-4 6-5 +0-0 1-1 1-2 2-3 2-4 3-5 4-6 5-7 6-8 11-9 8-10 11-11 7-12 8-13 10-14 10-15 11-16 17-17 12-18 8-19 8-20 15-21 14-22 16-23 16-24 18-25 19-26 19-27 +0-0 3-1 3-2 4-3 5-4 6-5 8-6 9-7 10-8 9-9 8-10 14-11 +2-0 1-1 1-2 3-3 2-4 4-5 8-6 6-7 6-8 7-9 9-10 10-11 +1-0 1-1 3-2 4-3 5-4 6-5 8-6 7-7 9-8 11-9 14-12 16-13 16-14 19-15 +0-0 1-1 1-2 2-3 3-4 4-5 5-6 6-7 7-8 8-9 9-10 +0-0 1-1 1-2 2-3 3-4 4-5 2-6 5-7 6-8 4-9 4-10 10-11 10-12 12-13 12-14 14-15 15-17 14-18 14-19 14-20 16-21 +0-0 0-1 1-2 2-3 3-4 3-5 6-6 6-7 7-8 8-9 12-10 13-11 9-12 11-13 14-14 14-15 +0-0 2-1 1-2 1-3 2-4 4-5 3-6 7-7 4-8 6-9 8-10 18-11 18-12 14-13 15-14 17-16 25-17 26-18 5-19 22-20 20-21 20-23 19-24 26-25 27-26 +0-0 0-1 1-2 2-3 3-4 3-5 +0-0 0-1 1-2 2-3 4-4 6-5 8-7 7-8 8-9 9-10 10-11 11-12 12-13 13-14 14-15 17-16 16-17 13-18 20-19 20-20 16-21 23-22 25-23 26-24 26-25 19-26 29-27 29-28 30-29 +0-0 1-1 4-2 5-3 6-4 7-5 8-6 9-7 8-8 9-9 17-10 9-11 10-12 11-13 11-14 11-15 12-16 15-17 16-19 17-20 18-21 +3-0 1-1 2-2 3-3 5-4 7-5 7-6 8-7 9-8 11-9 13-10 13-11 14-12 +0-0 1-1 3-2 4-3 4-4 4-5 6-6 6-7 7-8 9-9 10-10 12-11 11-12 13-13 13-14 14-15 16-16 16-17 18-18 
19-19 20-20 +0-0 3-1 1-2 3-3 5-5 6-6 7-7 7-8 10-9 11-10 12-11 13-12 +0-0 0-1 3-2 2-3 4-5 12-7 5-8 6-9 8-10 9-11 10-12 12-13 10-14 11-15 13-16 13-17 14-18 14-19 15-20 +0-0 1-1 2-2 +0-0 3-1 3-2 1-3 6-4 2-5 4-6 4-7 6-8 5-9 9-10 7-11 6-12 10-13 10-14 11-15 8-16 13-17 16-18 14-20 17-21 18-22 17-23 19-24 +0-0 0-1 2-2 2-3 2-4 2-5 4-7 4-8 21-10 23-11 12-12 11-13 10-15 26-16 27-17 28-18 29-19 10-20 16-21 17-22 16-24 22-26 19-27 20-28 20-29 24-30 22-31 25-32 23-33 26-34 24-35 15-36 15-37 24-39 33-40 32-41 27-42 35-45 34-47 32-48 37-49 +2-0 1-1 0-2 6-3 6-4 8-5 8-6 12-7 9-8 7-9 15-10 14-11 17-12 14-13 15-14 19-15 23-16 20-17 21-18 20-19 20-20 24-21 +0-0 8-1 1-2 2-3 1-4 3-5 2-6 8-7 10-8 7-9 12-10 8-11 9-12 11-14 11-16 14-17 15-19 13-20 16-21 19-22 17-23 20-24 19-25 20-26 21-27 +1-0 1-1 2-2 3-3 4-4 4-5 6-6 7-7 9-8 9-9 10-10 +0-0 3-1 3-2 7-4 4-5 5-6 6-7 7-8 9-9 8-10 11-11 12-12 10-13 13-14 8-15 13-16 12-17 12-18 16-19 15-20 20-21 16-22 19-23 20-24 19-25 22-26 +0-0 1-1 0-2 2-3 1-4 3-5 4-6 4-7 5-8 6-9 7-10 +0-0 1-1 2-2 0-3 4-4 4-5 6-6 5-7 5-8 7-9 8-10 8-11 12-12 13-13 11-14 13-16 16-17 17-18 15-20 15-21 19-22 20-24 21-25 +0-0 2-1 3-2 3-3 5-4 6-5 +1-0 1-1 3-2 3-3 7-4 3-5 4-6 8-7 10-8 11-9 12-10 11-11 14-12 13-13 14-14 15-15 16-16 +1-0 1-1 0-2 4-4 4-5 5-6 6-7 7-8 9-9 10-10 11-11 10-12 13-13 13-14 15-15 16-16 +0-0 2-1 19-2 4-3 5-4 5-5 10-6 9-7 11-8 12-9 9-10 15-11 18-12 21-13 19-14 24-15 25-16 23-17 24-18 29-20 29-21 32-22 32-23 31-24 33-25 +1-0 1-1 3-2 3-3 4-4 6-5 7-7 8-8 +1-0 3-1 2-2 3-3 3-4 4-5 4-6 5-7 6-8 7-9 9-10 12-11 10-12 10-13 12-14 14-15 14-16 15-17 15-18 17-19 19-20 19-21 20-22 +0-0 0-1 1-4 5-5 8-6 7-7 9-8 11-9 9-10 10-11 12-12 13-13 11-14 15-15 17-16 19-17 20-18 20-19 20-20 22-21 +0-0 3-1 3-3 2-4 5-5 6-6 6-7 7-8 5-9 9-10 11-11 16-13 18-14 14-15 14-16 16-17 19-19 21-20 20-21 25-22 23-23 23-24 23-25 27-26 30-27 26-28 26-29 29-30 31-31 33-32 33-33 33-34 34-35 +3-0 1-1 1-2 4-3 5-4 6-5 8-6 9-7 8-8 12-9 7-10 11-11 13-12 13-13 14-14 15-15 16-16 14-17 17-18 19-20 20-21 21-22 25-23 22-24 26-25 28-26 29-27 30-28 25-29 31-30 31-31 32-33 +1-0 1-1 0-2 2-3 3-4 4-5 7-6 8-7 10-8 9-9 11-10 12-11 13-12 13-13 14-14 +0-0 1-1 1-2 2-3 3-4 7-5 5-6 6-7 8-9 9-10 13-11 10-12 11-13 12-14 18-15 12-16 15-17 16-18 18-19 17-20 20-21 18-22 20-23 19-24 22-25 22-26 24-27 26-28 25-29 26-30 +0-0 0-1 8-2 3-3 6-4 6-5 7-6 7-7 11-8 9-9 13-10 12-11 13-12 14-13 15-14 14-15 18-16 16-17 18-18 19-19 20-20 +0-0 3-1 2-2 5-3 6-4 2-5 11-6 10-7 12-8 9-9 10-10 10-11 15-12 16-13 17-14 10-15 10-16 18-17 10-18 21-19 19-20 22-22 25-23 22-24 18-25 26-27 +0-0 0-1 2-2 2-3 3-4 5-5 4-6 4-7 8-8 9-9 10-10 13-11 12-13 11-14 13-15 17-17 +0-0 1-1 0-2 3-3 3-4 7-5 8-6 6-7 6-8 7-9 9-10 9-11 10-12 12-13 12-14 13-15 15-16 13-17 15-18 16-19 17-20 +1-0 1-1 0-2 4-4 3-5 6-6 6-7 8-8 9-10 9-11 11-12 10-13 12-14 13-15 14-16 +0-0 2-1 4-2 3-3 5-4 3-5 5-6 6-7 7-9 8-10 10-11 10-12 12-13 12-14 13-15 14-16 14-17 15-18 16-19 +3-0 0-1 2-2 1-3 3-4 4-5 6-6 6-7 6-8 1-9 8-11 13-13 12-14 10-15 13-16 13-17 15-18 35-19 15-20 18-21 18-22 18-23 22-24 23-25 22-26 23-27 8-28 21-29 26-30 26-31 30-32 30-34 23-35 29-36 37-37 30-38 31-39 32-40 27-41 34-43 35-44 37-45 36-46 38-47 39-48 39-49 41-50 +1-0 2-1 1-2 0-3 5-4 7-5 7-6 8-7 11-8 13-9 10-10 14-11 15-12 16-13 17-14 17-15 17-16 20-17 +0-0 0-1 0-2 0-3 1-4 2-5 4-6 4-8 6-9 5-10 7-11 5-12 8-13 10-14 12-15 11-16 11-17 13-18 10-19 14-20 15-21 14-22 16-23 15-24 17-25 17-26 18-27 +0-0 1-1 1-2 1-3 2-4 4-5 4-6 5-7 6-8 7-9 8-10 9-11 10-12 10-13 11-14 +0-0 1-1 2-2 3-3 5-4 3-5 3-6 4-7 8-8 10-9 8-10 8-11 9-12 12-13 19-16 15-18 13-19 18-21 19-22 22-23 23-24 
22-25 21-26 21-27 26-28 27-29 26-30 27-31 29-32 +0-0 2-1 13-2 5-3 4-4 5-5 9-6 8-7 12-8 15-9 +0-0 1-1 2-2 5-3 5-4 6-5 +0-0 0-1 1-3 1-4 1-5 2-6 2-7 3-8 3-9 4-10 4-11 +1-0 5-1 6-2 6-3 4-4 8-5 9-6 11-7 12-8 14-9 15-10 16-11 17-12 18-13 19-14 19-15 +4-0 1-1 3-2 4-3 8-4 4-5 6-6 9-7 10-8 11-9 13-10 14-11 14-12 17-13 18-14 +0-0 1-1 2-2 3-3 3-4 3-5 7-6 7-7 5-8 9-9 12-10 11-11 11-12 14-13 13-14 12-15 15-16 17-17 18-18 19-19 +0-0 1-1 2-2 2-3 5-4 6-5 7-6 7-7 7-8 12-9 12-10 13-11 12-12 13-13 14-14 +1-0 4-1 4-2 4-3 7-4 +0-0 6-1 7-2 1-3 1-4 3-5 5-6 12-7 8-8 14-9 10-10 8-11 8-12 13-13 13-14 12-15 20-16 15-17 16-18 17-19 18-20 23-21 24-22 23-23 22-24 25-25 28-26 28-27 27-28 30-29 27-30 31-31 29-32 31-33 37-34 34-35 23-36 35-37 33-38 27-39 38-40 +2-0 1-1 2-2 2-3 3-4 7-5 4-6 6-7 8-8 10-9 8-10 11-12 12-13 12-14 15-15 15-16 16-18 +0-0 1-1 1-2 5-3 4-4 7-5 6-6 8-7 +1-0 1-1 4-2 2-3 2-4 7-5 8-6 8-7 8-8 9-9 12-10 15-11 14-12 16-13 19-14 20-15 17-16 21-17 22-18 +0-0 0-1 2-2 4-3 4-4 5-5 4-6 7-7 6-8 9-9 8-10 11-11 +0-0 1-2 1-3 4-4 4-5 5-6 7-7 9-9 9-10 13-12 13-13 13-14 16-15 13-16 15-17 17-18 18-19 19-20 20-21 20-22 25-24 22-25 24-26 26-27 23-28 26-29 28-30 27-31 29-32 30-33 +0-0 2-1 2-2 3-3 4-4 5-5 +1-0 3-1 1-2 4-3 6-4 6-5 5-6 7-7 8-8 10-9 10-10 17-11 13-12 15-13 16-14 12-15 11-16 20-17 17-18 20-19 24-20 24-21 26-22 +0-0 11-1 2-2 2-3 3-4 4-5 7-6 8-8 9-9 9-10 8-11 14-12 15-13 14-14 13-15 17-16 18-17 19-18 21-19 16-20 22-21 +2-0 2-1 4-2 4-3 4-4 7-5 5-6 6-7 11-8 5-9 18-10 19-11 15-12 16-13 16-14 21-15 22-16 18-17 20-18 17-20 24-21 25-22 +1-0 1-1 1-3 4-4 2-5 5-6 8-8 9-9 10-10 10-11 11-12 +0-0 4-1 5-2 1-3 1-4 3-5 4-6 5-7 6-8 6-9 7-10 10-11 2-12 8-13 10-14 11-15 11-16 12-17 +0-0 3-1 6-2 2-3 6-4 6-5 3-6 5-7 6-8 7-9 6-10 9-11 12-12 12-13 10-14 14-15 13-17 14-18 15-19 18-20 21-21 21-22 22-23 22-24 +1-0 1-1 2-2 5-3 4-4 4-5 7-6 8-7 7-8 11-9 12-10 13-11 +0-0 1-1 1-2 5-4 3-5 7-7 8-8 10-9 10-10 9-11 12-12 15-13 8-14 15-15 14-16 18-17 18-18 22-19 21-20 24-21 22-22 24-23 +0-0 1-1 1-2 5-3 2-4 5-6 6-7 9-9 10-10 7-11 11-12 9-13 8-14 9-15 13-16 10-17 9-18 14-19 17-20 15-21 15-22 15-23 18-25 19-27 20-28 19-29 22-30 21-31 22-32 23-34 25-36 25-37 26-38 +0-0 0-1 2-2 5-4 2-5 5-6 6-7 10-8 5-9 12-12 10-13 14-14 15-15 13-16 14-18 16-19 17-20 17-21 22-23 22-25 23-26 22-27 23-28 +4-0 5-1 6-2 2-3 11-4 1-5 1-6 9-7 8-8 11-9 17-10 12-11 11-12 11-13 11-14 16-15 14-16 14-17 15-18 19-19 21-20 22-21 22-22 22-23 23-24 26-25 24-26 25-27 28-28 27-29 28-30 29-31 30-32 +0-0 1-1 1-2 3-3 2-5 2-6 6-7 6-8 7-9 9-10 10-11 11-12 10-13 12-14 14-15 10-16 15-17 16-18 18-19 18-20 15-21 19-22 +5-0 2-1 3-2 6-3 7-4 9-5 11-6 13-7 13-8 16-9 13-10 17-11 +0-0 1-1 4-3 4-4 4-5 6-6 6-7 9-8 7-9 12-10 9-11 11-12 6-13 12-14 12-15 12-16 14-17 16-18 17-19 13-21 17-22 18-23 20-24 22-25 21-26 21-27 11-28 12-29 32-31 27-32 26-33 28-34 30-35 25-36 29-37 31-38 32-39 +0-0 1-1 7-2 3-3 4-4 5-5 6-6 6-7 1-8 8-9 14-10 10-11 7-12 12-13 13-14 13-15 13-16 14-17 15-18 8-19 18-20 20-21 20-22 19-23 21-24 +0-0 2-1 3-2 1-3 3-4 5-5 4-6 4-7 5-8 6-9 5-10 7-11 8-12 8-13 8-14 10-15 9-16 11-17 +0-0 3-1 1-2 3-3 4-4 2-5 4-6 4-7 6-8 8-9 8-10 9-11 9-12 12-13 14-14 11-15 13-16 13-17 13-20 16-21 +5-0 6-1 5-2 4-3 5-4 6-5 8-6 5-7 9-8 6-9 9-10 14-11 13-12 13-13 18-14 6-16 17-17 17-18 6-19 21-20 17-21 20-24 25-25 26-26 30-27 30-28 32-29 29-31 34-32 17-33 37-34 29-41 43-43 44-44 46-45 51-46 61-49 52-50 52-51 51-52 65-54 58-57 60-58 61-59 68-61 63-62 64-64 64-65 55-66 67-68 69-69 +0-0 2-1 4-3 3-4 5-5 1-6 6-7 8-8 7-9 10-11 10-12 13-13 14-14 14-15 13-16 14-17 15-18 13-19 18-20 19-21 20-22 21-23 +0-0 2-1 3-2 4-3 6-4 7-5 7-6 
8-7 9-8 9-9 10-10 12-11 16-13 16-14 17-15 17-16 20-17 21-18 20-19 23-20 24-21 +0-0 1-1 4-2 3-3 4-4 2-5 5-6 +1-0 4-1 3-2 3-3 3-4 4-5 5-6 2-7 8-9 9-10 10-11 10-12 10-13 11-14 12-15 13-16 14-17 +0-0 1-1 1-2 2-3 3-4 3-5 4-6 +0-0 1-1 2-2 1-4 3-5 5-6 4-7 4-8 4-9 6-10 6-11 4-12 7-13 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 6-7 9-8 10-9 +0-0 0-1 2-2 2-3 5-4 4-5 5-6 5-7 6-8 6-9 7-10 9-11 12-12 11-13 12-14 14-16 14-17 15-18 +1-0 0-1 0-2 4-3 0-4 7-6 7-7 10-9 14-10 9-11 14-12 14-13 15-14 16-15 11-16 17-17 21-18 22-19 +0-0 1-1 1-2 3-3 5-4 3-5 5-6 5-7 6-8 6-9 8-10 8-11 8-12 9-13 9-14 +0-0 +0-0 2-1 2-2 3-3 6-4 9-5 6-6 15-7 17-8 12-9 12-10 15-11 21-12 18-13 18-14 18-15 20-16 22-17 +0-0 1-1 1-2 2-3 2-4 3-5 4-6 4-9 7-10 8-11 9-12 10-13 11-14 10-15 12-16 11-17 15-19 16-20 16-21 19-23 18-24 12-26 22-27 19-28 23-29 +0-0 0-1 1-2 5-4 3-5 2-6 4-7 7-8 5-9 6-10 8-11 8-14 10-15 10-16 10-17 10-18 12-19 13-20 14-21 +1-0 1-1 2-3 4-4 10-5 6-6 6-7 6-9 6-10 9-11 11-13 12-14 12-15 11-16 12-17 13-18 12-19 18-20 15-21 27-23 26-24 21-25 22-26 18-28 18-29 23-30 21-32 26-33 31-35 32-37 32-38 +0-0 1-1 2-3 3-4 4-5 5-6 6-7 6-8 6-9 7-10 7-11 7-12 8-13 11-14 12-15 13-16 15-17 15-18 16-19 16-20 17-21 17-22 18-23 +0-0 1-1 1-2 1-3 7-4 3-5 5-6 5-7 6-8 6-9 8-10 7-11 11-12 11-13 13-14 15-15 12-16 16-17 17-18 18-19 18-20 +2-0 3-1 2-2 3-4 1-5 5-6 6-7 8-8 9-9 12-10 12-11 10-12 1-13 14-14 13-16 13-17 17-18 15-19 14-20 19-21 21-23 24-24 21-27 23-28 15-29 26-30 26-31 23-32 27-33 27-34 +0-0 3-1 3-2 3-3 4-4 6-5 4-6 7-7 8-8 9-9 10-10 12-11 12-12 13-13 14-14 14-15 16-16 17-17 18-18 +0-0 1-1 2-2 3-3 4-4 5-5 +0-0 0-2 3-3 3-4 5-5 2-6 6-7 6-8 6-9 12-10 11-11 11-12 18-13 13-14 14-15 15-16 13-17 16-18 15-19 17-20 20-22 22-23 23-24 21-25 24-26 26-27 27-28 27-29 27-30 28-31 +3-0 11-1 12-2 1-3 5-5 6-6 8-7 9-8 9-9 0-10 10-11 6-12 12-13 14-14 18-15 16-16 16-17 19-18 21-19 21-20 22-21 25-22 25-23 23-24 19-25 27-26 +0-0 1-1 2-2 2-3 2-4 4-5 4-6 5-7 6-8 +0-0 1-1 1-2 2-3 3-4 5-5 6-6 7-7 9-8 8-9 10-10 11-11 11-12 11-13 12-14 13-15 +0-0 1-1 1-2 2-4 3-5 6-6 4-8 9-9 10-10 11-11 10-12 11-13 14-14 13-15 16-16 13-17 12-18 17-19 17-20 18-21 17-22 20-23 20-24 21-25 +0-0 2-1 2-2 1-3 5-4 4-5 5-6 5-7 6-8 7-9 8-10 10-11 9-12 10-13 11-14 12-15 +2-0 2-1 10-2 4-3 5-4 5-5 6-6 7-7 8-8 14-9 12-10 11-11 7-12 16-13 16-14 17-15 16-16 17-17 16-18 10-20 24-21 13-22 23-23 25-24 +0-0 4-1 5-2 6-3 +0-0 1-1 0-2 5-3 1-4 3-5 4-6 5-7 6-8 11-9 8-10 9-11 11-12 5-13 11-14 14-15 14-16 14-17 16-18 15-19 16-20 18-21 +1-0 2-1 2-2 4-3 4-4 8-5 14-6 10-7 12-8 11-9 13-10 15-11 15-12 17-13 +0-0 2-1 5-2 1-3 1-4 6-5 7-6 10-8 11-9 11-10 12-11 12-12 15-13 16-14 14-15 16-16 17-17 +1-0 1-1 3-2 3-3 9-4 9-5 7-6 7-7 8-8 12-9 9-10 14-11 15-12 18-13 19-14 19-15 19-16 20-17 +0-0 2-1 1-2 2-3 3-4 3-5 4-6 4-7 4-8 6-9 +0-0 2-1 3-2 4-3 6-4 7-5 8-6 10-7 11-8 +1-0 1-1 6-2 4-3 3-4 3-5 9-6 7-7 9-8 10-9 6-11 17-12 14-14 13-15 15-16 15-17 16-18 16-19 17-20 22-21 25-22 25-23 28-24 21-26 25-27 16-28 30-30 22-31 30-32 31-33 31-34 32-35 33-36 34-37 35-38 36-39 +5-0 7-1 1-2 7-3 5-4 9-6 10-7 11-9 10-10 11-12 14-14 14-15 12-16 17-17 +1-0 1-1 1-2 1-3 3-4 3-5 4-6 4-7 5-8 4-9 6-10 7-11 +10-1 1-2 2-4 5-5 4-6 4-7 6-8 6-9 6-10 11-11 13-13 16-14 13-15 17-16 6-17 22-18 19-19 25-20 21-21 18-22 21-23 23-24 22-25 25-26 26-27 26-28 29-29 30-30 30-31 30-32 32-33 +0-0 0-1 1-2 2-3 +0-0 3-1 1-2 5-3 6-4 6-5 12-6 10-8 11-9 13-10 8-11 14-12 16-13 14-14 15-15 17-16 16-17 18-18 21-19 21-20 22-21 +4-0 0-1 12-2 8-3 0-4 2-6 10-8 11-9 10-10 14-11 12-12 13-13 17-14 15-15 16-17 19-18 17-19 18-20 22-21 26-22 28-23 20-24 28-28 33-29 30-30 31-31 32-32 34-33 35-34 35-35 37-36 
33-37 38-38 +0-0 1-1 3-2 4-3 4-4 5-5 6-6 8-7 9-8 10-9 +0-0 1-1 1-2 1-4 3-5 3-6 4-7 5-8 3-9 8-10 8-11 7-12 8-13 9-14 9-15 12-16 10-17 11-18 13-19 12-20 10-21 12-22 15-23 16-24 +0-0 3-1 6-2 6-3 8-4 9-5 10-6 14-7 14-8 15-9 +0-0 3-1 3-2 3-3 5-4 4-5 6-6 3-7 14-8 15-9 11-10 12-12 13-13 15-14 16-15 20-16 20-17 19-18 19-19 18-20 17-21 23-22 +0-0 1-1 1-2 3-3 3-4 4-5 6-6 6-7 7-8 +0-0 6-1 2-2 3-3 2-4 6-5 4-6 8-8 +0-0 1-1 3-2 4-3 10-4 9-5 9-6 11-7 10-8 13-9 +0-0 1-1 2-2 4-3 5-4 8-5 9-6 7-7 10-8 8-9 10-10 8-11 12-12 26-13 13-14 12-15 14-16 15-17 16-18 17-19 18-20 15-21 23-22 22-23 23-24 22-26 25-27 37-29 19-30 26-31 29-32 32-33 30-34 31-35 34-36 34-37 32-38 36-39 37-41 37-42 25-43 38-44 39-45 +0-0 0-1 3-2 3-3 3-4 4-5 5-6 6-8 6-9 10-11 11-12 12-13 12-14 13-15 17-16 16-17 17-18 18-19 22-23 17-24 23-26 24-27 25-28 26-29 15-30 28-33 29-34 31-35 32-36 32-37 33-38 +0-0 0-1 1-2 2-3 2-4 3-6 4-7 +0-0 3-2 4-4 8-5 8-6 10-7 12-9 12-10 17-11 14-12 14-13 19-14 20-15 +0-0 1-1 2-2 4-3 3-4 6-5 7-6 8-7 1-8 9-9 11-10 12-11 13-12 18-13 14-14 15-15 15-16 16-17 15-18 18-19 17-20 20-21 21-22 22-23 23-24 24-25 22-26 24-27 26-28 +0-0 1-1 3-2 2-3 10-4 6-5 6-6 4-7 5-8 6-9 11-10 +0-0 1-1 3-2 4-3 4-4 7-5 10-6 6-7 12-8 12-9 14-10 14-11 +1-1 1-3 2-4 2-5 7-6 5-7 3-8 6-9 7-10 11-12 6-13 8-14 10-15 7-16 15-17 13-18 12-19 13-20 16-21 16-22 16-23 14-24 17-25 19-26 15-27 20-28 +0-0 0-1 3-2 4-3 7-4 1-5 5-6 6-7 10-9 9-10 12-12 11-13 15-14 13-15 13-16 14-17 15-18 19-19 18-21 19-22 +2-0 2-1 6-2 4-3 9-4 7-5 11-6 12-7 13-8 15-10 17-11 18-12 18-13 19-14 +0-0 1-1 5-2 6-3 4-4 1-5 6-6 17-7 8-8 11-9 11-10 13-11 12-12 14-13 15-14 20-15 16-16 17-17 19-18 17-19 22-20 +0-0 2-1 3-2 7-3 7-4 16-5 13-7 5-8 13-9 6-10 12-11 11-12 22-13 15-14 16-15 17-16 16-17 19-18 20-19 22-20 22-21 30-22 23-23 25-24 26-25 27-26 28-27 30-28 31-29 32-30 33-31 34-32 34-33 +0-0 1-1 2-2 4-3 5-4 1-5 6-6 7-7 7-8 10-10 15-12 12-13 15-14 19-15 17-16 16-18 17-19 17-20 17-21 26-22 34-23 23-24 28-25 30-27 29-28 29-29 34-30 29-31 32-32 35-33 38-38 38-39 39-40 40-41 43-42 45-43 41-44 40-45 48-46 +0-0 2-1 0-2 2-3 4-4 4-5 5-6 7-7 8-8 6-9 9-10 10-11 10-12 12-13 13-15 13-16 13-17 12-18 13-19 15-20 16-21 20-22 20-23 22-24 23-25 17-26 24-27 +0-0 1-1 2-2 4-3 4-4 6-5 7-6 +4-0 2-1 3-2 4-4 6-5 7-6 8-7 9-8 10-9 5-10 9-11 10-12 13-13 17-14 10-15 16-16 17-17 17-19 15-20 23-22 24-24 12-25 12-26 21-27 22-28 25-29 26-30 +6-0 3-1 1-2 0-3 6-4 6-5 7-6 9-7 11-8 11-9 13-10 14-11 16-12 18-13 17-14 20-15 22-16 23-17 24-18 +0-0 0-1 5-2 4-3 7-5 7-6 8-7 12-8 12-9 12-10 14-11 +1-0 4-1 5-2 4-3 6-4 11-5 9-6 7-7 6-8 12-9 11-10 12-11 13-12 7-13 12-14 19-15 18-16 18-17 19-18 21-19 22-20 19-22 24-23 25-24 26-25 27-26 28-27 29-28 31-29 31-30 32-31 +0-0 1-2 0-3 2-4 8-5 6-7 11-8 8-9 8-10 14-11 9-12 12-13 16-14 17-15 9-16 13-17 15-18 15-19 22-21 19-22 20-23 8-25 21-26 23-27 +0-0 1-1 0-2 3-3 4-4 5-5 6-6 8-7 9-8 14-9 10-10 13-11 14-12 13-13 15-14 16-15 17-16 18-17 19-18 22-19 24-20 19-21 23-22 25-24 25-25 32-26 32-27 36-28 36-29 35-30 37-31 33-32 37-33 +0-0 1-3 2-4 4-5 8-6 14-7 6-8 14-9 12-11 9-12 12-13 11-14 15-15 16-16 11-17 11-18 10-19 13-20 13-21 13-22 16-23 19-26 19-27 25-28 9-29 20-30 22-31 23-32 23-33 24-34 25-35 26-37 27-38 +0-0 1-1 1-2 7-3 3-4 2-5 4-6 5-7 6-8 7-9 6-10 9-11 +0-0 0-1 2-2 5-3 3-4 9-6 9-7 9-8 11-10 12-11 14-12 14-13 18-14 16-15 14-16 17-17 23-19 20-20 24-21 24-22 25-23 28-24 27-25 17-26 29-27 30-28 33-29 31-30 33-31 32-32 36-33 +0-0 0-1 1-2 2-3 4-4 5-5 5-6 6-7 +2-0 1-1 2-2 2-3 4-4 4-5 5-6 8-7 3-8 9-10 6-11 7-12 11-13 9-14 9-15 16-16 16-17 13-18 14-19 16-20 17-21 17-22 18-23 19-24 20-25 21-26 21-27 24-30 
[flattened diff hunk: several hundred added lines of Pharaoh-format word alignments (source-target token index pairs such as "0-0 1-1 2-2"), one alignment sentence per "+"-prefixed added line; the raw alignment data is elided here as a placeholder]
17-15 20-16 21-17 21-18 23-19 24-20 +0-0 2-1 2-2 1-3 3-4 4-5 6-6 5-7 7-8 8-9 9-10 11-11 12-12 13-13 15-14 15-15 15-16 16-17 18-18 18-19 19-20 20-21 +1-0 1-1 2-2 3-3 4-4 5-5 +0-0 1-1 2-2 3-3 5-4 6-5 6-6 7-7 9-8 10-9 10-10 12-11 15-12 12-13 16-14 17-15 18-16 11-17 20-18 23-19 22-20 22-21 24-22 25-23 26-24 28-25 29-26 25-27 30-28 30-29 +0-0 1-1 3-2 2-3 3-4 5-5 5-6 7-7 8-8 10-10 11-11 12-12 13-13 14-14 +0-0 1-1 3-2 4-3 4-4 6-5 7-6 9-7 10-8 12-9 11-10 14-11 14-12 16-13 17-14 18-15 +0-0 4-1 4-2 5-3 6-4 1-5 7-6 11-7 8-8 13-9 12-10 12-11 17-12 16-13 19-14 20-15 11-16 21-17 15-18 22-19 22-20 25-21 27-22 28-23 29-24 +1-0 1-1 3-2 2-3 4-4 4-5 6-6 6-7 8-8 8-9 9-10 +1-0 3-1 3-2 1-3 4-4 9-6 3-7 10-8 10-9 15-10 18-12 20-13 14-14 20-15 20-16 26-17 21-18 27-19 21-20 28-21 +0-0 0-1 1-2 2-3 4-4 6-6 5-7 9-8 9-9 10-10 +0-0 2-2 4-3 33-4 6-6 4-7 3-8 11-9 7-10 8-11 9-12 10-13 11-14 0-15 17-16 17-17 20-18 21-19 22-20 24-21 25-22 28-23 27-24 29-25 30-26 31-27 32-28 34-29 35-30 +2-0 7-1 2-2 0-3 3-5 8-6 9-7 10-8 14-10 6-11 12-13 14-14 15-15 16-16 17-17 18-18 20-19 21-20 22-21 23-22 24-23 25-24 26-25 27-26 +2-0 2-1 0-2 10-3 5-4 6-5 4-6 6-7 1-8 0-9 7-10 9-11 9-13 11-14 12-15 12-16 15-17 14-18 15-19 14-20 18-21 18-22 19-23 +0-0 5-1 2-2 4-3 7-4 8-6 9-7 10-8 +0-0 1-1 2-2 2-3 5-4 7-5 6-6 9-7 8-8 10-9 11-10 13-11 14-12 14-13 17-14 18-15 +3-0 1-1 1-2 7-3 4-4 6-5 7-6 7-7 11-8 9-9 12-10 10-11 13-12 12-13 16-15 21-16 18-17 19-18 20-19 21-20 23-21 27-22 27-23 28-24 33-25 30-26 30-27 31-28 32-29 29-30 37-31 38-32 40-33 41-34 42-35 +1-0 1-1 6-2 3-3 2-4 4-5 6-6 7-7 8-8 +0-0 1-1 2-2 2-3 3-4 4-5 8-6 7-7 9-8 12-9 14-10 19-11 14-12 22-13 20-14 24-15 22-16 27-17 29-18 28-19 30-20 25-21 31-22 32-23 30-24 34-25 35-26 36-27 29-28 37-29 39-30 29-31 40-32 +1-0 4-1 5-2 6-3 8-4 6-6 10-7 6-8 6-9 8-10 1-12 13-13 13-14 13-15 13-16 15-17 14-18 14-19 16-20 13-21 18-22 19-23 +0-0 1-1 2-2 4-3 4-4 7-5 +0-0 0-1 1-2 2-3 3-4 4-5 4-6 6-7 5-8 7-9 8-10 9-11 9-12 10-13 11-14 12-15 13-16 14-17 15-18 +0-0 15-1 16-2 4-3 3-7 5-8 6-9 12-10 9-11 12-12 16-13 16-14 17-15 +0-0 2-1 2-2 1-3 4-4 4-5 6-6 9-7 11-8 14-9 7-10 8-11 15-12 9-13 13-14 10-15 11-16 17-17 18-18 23-19 23-20 24-21 25-22 +0-0 26-1 2-2 2-3 6-4 6-5 4-6 11-8 10-10 14-11 18-12 17-13 20-14 20-15 21-16 22-17 24-19 25-21 27-22 28-23 29-24 32-25 34-26 35-27 36-28 38-30 39-31 40-32 +1-0 1-1 2-2 4-3 4-4 6-5 7-6 9-7 11-8 12-9 10-10 13-12 18-13 17-14 19-15 19-16 20-17 21-18 +0-0 1-1 2-2 4-3 4-4 8-5 7-6 15-7 8-8 9-9 10-10 11-11 16-12 17-13 16-14 19-15 13-16 14-17 15-18 20-19 21-20 22-21 23-22 24-23 26-24 24-25 29-26 30-27 31-28 +0-0 1-1 3-2 2-3 4-4 5-5 6-6 7-7 8-8 10-9 11-10 13-11 14-12 14-13 20-14 19-15 20-16 21-17 +0-0 1-1 2-2 2-3 5-4 5-5 6-6 7-7 12-8 7-9 12-10 16-11 14-12 16-13 16-14 16-15 17-16 18-17 19-18 23-19 21-20 23-21 23-22 26-23 29-24 29-25 28-26 29-27 28-29 34-30 28-31 35-32 36-33 +0-0 1-1 1-2 2-3 4-4 5-5 6-6 7-7 7-8 8-9 +0-0 1-1 4-2 4-3 3-4 6-6 7-7 8-8 8-9 10-10 11-11 12-12 12-13 13-14 14-15 15-16 15-17 16-18 18-19 19-20 19-21 20-22 21-23 +0-0 1-1 2-2 2-3 3-4 3-5 4-6 5-7 +0-0 1-1 2-2 4-3 +0-0 0-1 1-2 1-3 2-4 2-5 4-6 5-7 +0-0 1-1 2-2 4-3 4-5 4-6 5-7 7-8 8-9 8-11 9-12 10-13 11-14 14-15 14-16 15-17 15-18 18-19 19-20 17-21 17-22 19-23 20-24 +0-0 1-1 2-2 3-3 4-4 7-5 9-6 8-7 10-8 11-9 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 7-8 8-9 8-10 7-11 9-12 12-13 10-14 12-15 13-16 15-17 16-18 16-19 17-20 +1-0 1-1 3-2 0-3 6-4 4-5 10-7 11-8 +1-0 1-1 2-2 4-4 5-5 6-6 +0-0 0-1 2-2 5-3 4-4 4-5 7-7 9-8 9-9 10-10 +0-0 3-1 4-2 7-4 5-5 6-6 4-7 8-8 9-9 12-10 11-11 17-13 14-14 15-15 13-16 17-17 22-18 19-19 23-20 22-21 25-22 28-24 28-25 
31-29 33-30 34-31 35-32 +0-0 0-1 2-2 2-3 5-4 6-5 7-6 8-7 +0-0 1-1 1-2 0-3 3-4 4-5 6-6 7-7 5-8 7-9 8-10 9-11 10-12 10-13 13-14 11-15 12-16 15-17 15-18 16-19 +0-0 1-1 2-2 3-3 5-4 4-5 4-6 5-7 6-8 8-9 8-11 13-12 15-14 15-15 16-16 17-17 18-18 20-19 +0-0 1-1 4-3 3-4 4-5 5-6 8-7 12-8 9-9 10-10 12-11 13-12 14-13 15-14 16-15 19-16 19-18 19-19 21-20 25-21 24-22 26-23 +0-0 1-1 4-2 1-3 2-4 3-5 7-6 11-7 14-8 14-9 16-10 +0-0 1-1 2-2 8-3 10-4 6-5 6-6 7-7 13-8 10-9 17-10 14-11 11-13 13-15 1-16 18-18 20-19 21-20 21-21 23-22 24-23 25-24 +1-0 1-1 2-2 3-3 4-4 4-5 6-6 8-7 8-8 9-9 +0-0 1-1 2-2 2-3 6-4 5-5 7-6 8-7 8-8 11-9 12-10 14-11 16-12 18-13 17-14 18-15 19-16 +0-0 1-1 1-2 2-3 6-5 8-6 9-7 9-8 10-9 10-10 14-11 15-12 +0-0 2-1 4-2 6-3 4-4 6-5 7-6 8-7 9-8 10-9 11-10 12-11 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 2-7 8-8 13-9 14-10 15-11 15-12 15-13 17-14 18-15 +2-0 2-1 4-2 3-3 5-4 6-5 10-8 10-9 11-10 12-11 12-12 13-13 14-14 16-15 19-16 19-17 19-18 20-19 21-20 22-21 +1-0 2-1 1-3 5-4 1-5 7-6 7-7 6-8 10-9 13-10 +1-0 1-1 8-2 1-3 4-4 4-5 5-6 5-7 7-8 10-10 10-11 10-12 12-13 +0-0 2-1 6-2 10-3 4-4 1-5 8-6 7-7 8-8 12-9 18-10 13-11 14-12 15-13 14-14 21-15 +0-0 1-1 4-2 4-3 2-4 4-5 5-6 6-7 8-8 10-9 7-10 12-11 12-12 13-13 14-14 17-16 18-17 20-18 21-19 21-20 22-21 +0-0 4-1 3-2 4-3 5-4 3-5 10-6 7-7 9-8 12-10 13-11 15-13 14-14 16-15 17-16 18-17 19-18 21-19 20-20 22-21 +0-0 1-1 4-2 5-3 4-4 8-5 7-6 12-7 11-8 14-9 12-10 14-11 15-12 +2-0 2-1 3-2 3-3 5-4 +0-0 2-1 3-2 3-3 5-4 6-5 +0-0 0-1 1-2 2-3 3-4 4-5 +0-0 1-1 2-2 3-3 +0-0 5-1 2-2 6-3 6-4 7-5 +0-0 1-1 2-2 3-3 2-5 5-6 4-7 5-8 7-9 9-10 9-11 10-12 11-13 +0-0 0-1 2-2 0-3 4-4 5-5 8-6 12-7 8-8 9-9 10-10 11-11 12-12 12-13 15-14 16-15 17-16 18-17 18-18 19-19 +0-0 1-1 2-2 +0-0 2-1 2-2 3-3 5-4 7-5 8-6 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 10-7 11-9 13-10 15-11 17-12 18-13 +0-0 1-1 4-2 5-3 6-4 7-5 7-6 9-7 10-8 11-9 +0-0 0-1 2-2 3-3 4-4 5-5 6-6 8-7 7-8 9-9 11-10 13-11 15-12 10-13 14-14 16-15 17-16 +0-0 2-1 2-2 4-3 4-4 5-5 8-6 9-7 10-8 11-9 12-10 12-11 13-12 14-13 15-14 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 +0-0 1-1 2-2 3-3 4-4 8-6 5-7 10-8 11-9 6-10 7-11 8-12 9-13 14-14 12-15 13-16 7-17 14-18 15-19 16-20 17-21 20-23 20-24 26-25 23-26 19-27 23-29 24-30 28-31 28-32 29-33 30-34 31-35 32-36 33-37 34-38 35-39 +0-0 1-1 3-2 3-3 5-4 6-5 8-6 9-7 +0-0 1-1 3-3 4-4 4-6 9-7 7-8 8-9 10-10 8-11 14-12 7-13 13-14 13-15 15-16 +2-0 2-1 3-2 5-4 4-5 7-6 6-7 11-8 13-9 14-10 +0-0 1-1 2-2 2-3 3-4 4-5 7-6 8-7 6-8 9-9 11-10 13-11 14-12 14-13 13-14 14-15 15-16 +0-0 1-1 2-3 3-4 4-5 5-6 6-7 7-8 7-9 7-10 9-11 9-12 11-13 11-14 11-15 13-16 +0-0 1-1 1-2 2-3 3-4 4-5 9-7 6-8 7-9 8-10 7-11 9-12 10-13 10-14 12-15 +1-0 0-1 1-2 2-3 3-4 4-5 6-6 5-7 9-8 7-9 9-10 10-11 12-12 13-13 13-14 14-15 15-16 16-17 18-18 19-20 +0-0 10-1 1-2 7-3 8-4 12-5 13-6 14-7 9-8 15-9 8-10 11-11 9-12 16-13 17-14 18-15 18-16 20-17 20-18 +0-0 1-1 3-2 4-3 5-4 7-5 8-6 9-7 10-8 12-9 11-10 10-11 16-12 18-13 17-14 19-15 20-16 21-17 22-18 23-19 24-20 25-21 +4-0 3-1 3-2 5-3 13-4 6-5 11-7 12-8 9-9 15-11 1-12 16-14 18-15 19-16 20-17 21-18 22-19 23-20 24-21 25-22 +0-0 2-1 4-2 5-3 19-4 11-6 6-7 21-8 8-9 24-10 23-11 25-12 17-13 26-14 27-15 17-16 30-17 21-18 20-19 33-20 28-22 38-23 40-25 31-26 32-27 34-28 19-29 18-30 40-31 37-32 40-33 41-34 +2-0 4-1 4-2 4-3 1-4 3-5 7-6 8-7 9-8 10-9 12-10 14-11 15-12 16-13 12-14 10-15 17-16 +0-0 3-1 2-2 4-3 5-4 5-5 6-6 8-7 9-8 10-9 11-10 14-11 13-12 15-13 16-14 18-16 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 15-12 14-13 12-14 14-15 21-18 17-19 23-20 24-21 23-22 22-23 20-24 19-25 20-26 26-27 28-28 29-29 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 +0-0 3-1 2-2 5-3 
5-4 5-5 5-6 6-7 9-8 4-10 11-11 15-12 12-13 15-14 16-15 18-16 +0-0 1-1 4-2 5-3 7-4 7-5 8-6 10-7 11-8 12-9 +0-0 1-1 0-2 2-3 4-4 5-5 8-7 14-8 6-9 9-10 12-11 9-12 12-13 13-14 15-15 +0-0 3-1 3-2 4-3 7-6 6-7 8-8 9-9 10-10 11-11 12-12 13-13 14-14 15-15 19-17 20-18 21-19 22-21 +0-0 1-1 3-2 9-3 7-4 5-5 13-6 7-7 8-8 10-9 10-10 6-11 12-12 15-13 16-14 16-15 18-16 18-17 18-18 20-19 +0-0 1-1 3-2 2-3 5-5 6-6 6-7 9-8 7-9 8-10 10-11 11-12 +0-0 1-1 2-2 5-3 6-4 4-5 7-6 8-7 9-8 11-9 13-10 14-11 16-12 17-13 17-14 16-15 18-16 +1-0 1-1 3-2 10-3 10-4 4-5 5-6 9-7 8-8 7-9 9-10 8-11 13-12 14-13 15-14 14-15 15-16 16-17 17-18 18-19 19-20 +0-0 2-1 1-2 3-3 4-4 5-5 6-6 6-7 3-8 11-9 10-10 12-12 14-13 15-14 16-15 17-16 +1-0 0-1 8-2 8-3 4-4 5-5 5-6 6-7 8-8 2-9 10-10 11-11 11-12 12-13 +0-0 0-1 2-2 3-3 3-4 4-5 5-6 6-7 6-8 7-9 7-10 9-11 10-12 +0-0 3-1 2-2 2-3 4-4 5-5 3-6 8-7 17-8 8-9 9-10 10-11 12-12 13-13 14-14 18-15 16-16 7-17 16-18 9-19 19-20 +0-0 1-1 1-2 3-3 3-4 5-5 5-6 +0-0 0-1 0-2 2-3 3-4 4-5 7-6 12-9 22-10 10-12 12-13 14-14 15-15 17-17 18-18 20-19 19-20 19-21 22-22 23-23 +4-1 1-2 12-3 9-4 9-5 5-6 8-7 12-8 15-11 18-12 29-13 20-14 22-15 25-16 27-17 29-18 30-19 43-20 46-21 34-22 35-23 44-24 38-25 33-26 40-27 36-28 37-29 45-30 39-31 48-32 +1-0 0-1 2-2 2-3 7-5 4-6 5-7 9-8 10-9 11-10 11-11 13-12 14-13 +0-0 2-1 1-2 2-3 6-4 7-5 19-6 9-7 11-8 14-10 14-11 24-12 3-14 19-15 21-17 12-18 25-19 +0-0 0-1 0-2 1-3 3-4 4-5 5-6 5-7 6-8 7-9 8-10 +0-0 2-1 2-2 3-3 4-4 5-5 +0-0 0-1 4-3 14-5 15-6 11-8 12-9 10-10 11-11 16-12 17-13 7-14 17-15 20-16 23-17 24-18 22-19 23-20 25-21 27-22 28-24 30-25 31-26 +0-0 0-1 1-2 2-3 5-4 4-5 6-6 7-7 6-8 8-9 9-10 11-11 15-12 13-13 12-14 12-15 13-16 17-17 +0-0 0-1 2-2 2-3 3-4 +0-0 3-1 4-2 5-3 6-4 7-5 7-6 0-7 1-8 10-9 10-10 10-11 12-12 12-13 13-14 17-15 15-16 17-17 17-18 20-19 19-20 22-21 21-22 28-23 25-24 20-25 26-26 28-27 20-28 29-29 30-30 +1-0 1-1 1-2 3-3 4-4 2-5 3-6 5-7 7-8 8-9 5-10 12-11 11-12 12-13 11-14 13-15 14-16 +0-0 4-1 2-2 3-3 5-4 6-5 7-6 9-7 10-8 12-9 14-10 17-11 16-12 19-13 20-14 21-15 21-16 22-17 +0-0 1-1 1-2 2-3 5-4 3-5 4-6 5-7 8-8 6-9 8-10 9-11 5-12 10-13 13-14 11-15 13-16 14-17 15-18 16-19 17-20 17-21 18-22 +0-0 3-1 4-2 2-3 5-4 7-5 6-6 8-7 9-8 8-9 10-10 11-11 12-12 13-13 12-14 16-16 16-17 17-18 18-19 19-20 20-21 21-22 22-23 +0-0 1-1 2-2 3-3 3-4 5-5 5-6 6-7 7-8 10-9 8-10 9-11 10-12 10-13 11-14 12-15 13-16 14-17 15-18 +0-0 2-1 2-2 5-3 7-4 8-5 9-6 10-7 11-8 11-9 14-10 14-11 15-12 +0-0 1-1 2-2 5-3 6-4 4-5 1-7 8-8 11-9 13-10 13-11 15-12 16-13 17-14 +0-0 2-1 2-2 2-3 5-4 3-5 6-6 7-7 8-8 9-9 11-10 13-11 14-12 15-13 18-14 20-15 21-16 19-17 16-18 22-19 +1-0 2-1 2-2 3-3 5-4 6-5 5-6 4-7 5-8 11-9 11-10 9-11 19-12 15-13 17-14 13-15 17-16 18-17 19-18 16-19 23-20 24-21 +0-0 1-1 6-3 7-4 6-5 7-6 9-7 10-8 15-9 13-10 14-11 18-12 19-13 23-14 24-15 19-16 26-17 21-19 29-20 25-21 32-22 28-23 29-24 36-26 35-27 33-28 36-29 35-30 37-32 35-33 43-34 44-35 +0-0 1-1 10-2 3-3 5-4 7-5 9-6 11-7 12-8 13-9 13-10 15-11 18-12 20-13 20-14 21-15 24-16 24-17 25-18 26-19 +0-0 1-1 2-2 4-3 3-4 6-5 7-6 7-7 8-8 +1-0 5-1 7-2 5-3 10-4 8-5 12-6 9-7 0-8 19-9 11-10 17-11 15-12 19-14 20-15 20-16 20-17 25-18 25-19 25-20 28-21 27-22 30-23 29-24 33-25 34-26 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 8-7 10-8 15-9 13-10 14-11 16-12 12-13 14-14 17-15 21-16 16-17 17-18 17-19 19-20 25-21 21-23 23-24 24-25 27-26 28-28 27-29 29-30 +0-0 2-1 4-2 4-3 6-4 6-5 2-6 17-7 18-8 11-9 12-10 12-12 15-13 16-14 13-15 18-16 19-17 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 8-7 8-8 9-9 10-10 7-11 14-12 12-13 11-14 12-15 11-16 14-17 17-18 16-19 16-20 18-21 19-22 20-23 18-24 21-25 22-26 +1-0 1-1 1-2 1-3 
3-4 3-5 4-6 5-7 3-8 4-9 7-10 9-11 10-12 9-13 9-14 11-15 9-16 11-17 12-18 15-19 13-20 15-21 16-22 +0-0 1-1 2-2 3-3 6-4 5-5 6-6 7-7 9-8 11-9 12-10 13-11 10-12 14-13 +0-0 3-1 2-2 2-3 5-4 5-5 2-6 1-7 7-8 8-9 9-10 6-11 11-12 12-13 13-14 14-15 +1-0 2-2 2-3 5-4 6-5 5-6 6-7 3-8 9-9 10-10 +0-0 4-1 2-2 3-3 5-4 6-5 8-6 9-7 +0-0 3-1 4-2 1-3 2-4 8-5 9-6 8-7 10-8 9-9 13-10 14-11 14-12 16-13 17-14 16-15 24-16 18-17 22-18 21-19 24-20 25-21 +0-0 1-1 3-2 4-3 6-4 7-5 5-6 8-7 11-8 12-9 +0-0 0-1 1-2 2-3 4-4 5-5 5-6 +0-0 1-1 2-2 3-3 4-4 6-5 5-6 7-7 7-8 10-9 12-10 12-11 13-12 +0-0 1-1 2-2 3-3 4-4 9-5 8-6 5-7 9-8 11-9 14-10 15-11 15-12 17-13 17-14 19-15 20-16 +0-0 1-1 4-2 1-3 6-4 7-5 8-6 9-7 9-9 10-10 11-11 11-12 13-13 16-14 16-15 17-16 19-17 20-18 21-19 19-20 22-21 22-22 23-23 +0-0 1-1 2-2 6-3 3-4 4-5 4-6 9-7 5-8 11-9 8-10 14-11 11-12 16-13 18-14 19-15 17-16 10-17 6-18 28-19 28-20 29-21 30-22 28-24 34-25 32-26 29-27 26-28 31-29 24-30 27-31 35-32 +1-0 1-1 3-2 8-3 6-4 8-5 6-6 4-7 17-8 4-9 5-10 12-11 12-12 16-13 17-14 17-15 12-16 24-17 7-18 17-19 18-20 23-21 23-22 22-23 +0-0 2-1 3-2 3-3 6-4 0-5 5-6 7-7 8-8 9-9 9-10 10-11 11-12 12-13 13-14 14-15 15-16 16-17 17-18 18-19 19-20 20-21 21-22 22-23 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 8-7 9-8 10-9 11-10 11-11 13-12 14-13 15-14 16-15 17-16 18-17 +0-0 1-1 3-2 5-3 5-4 6-5 14-6 15-7 10-8 18-9 13-10 19-11 17-12 17-13 17-14 17-15 19-16 24-17 25-18 26-19 24-20 27-21 +0-0 1-1 5-2 8-3 9-4 9-5 9-6 +0-0 1-1 2-2 3-3 5-4 7-5 7-6 9-7 10-8 12-9 11-10 12-11 16-12 16-13 17-14 +1-0 1-1 3-2 5-3 7-4 8-5 6-6 0-7 4-8 10-9 11-10 12-11 13-12 +0-0 1-1 2-2 4-3 8-4 7-5 10-6 7-7 12-8 15-9 16-10 15-11 14-12 16-13 17-14 +0-0 1-1 5-2 6-3 8-5 10-6 12-7 13-8 11-9 20-10 22-11 18-12 17-13 18-14 19-15 25-16 +0-0 1-1 2-2 3-3 6-4 7-5 8-6 8-7 10-8 4-9 10-10 11-11 12-12 13-13 14-14 14-15 15-16 16-17 18-18 19-19 20-20 21-21 +0-0 2-1 4-2 6-3 6-4 9-5 10-6 14-7 12-8 15-9 19-11 19-12 18-13 22-14 25-15 27-16 28-17 28-18 30-19 30-20 33-21 35-22 35-23 +0-0 1-1 2-2 5-3 +1-0 1-1 2-2 2-3 5-4 4-5 7-6 7-7 9-8 10-9 +0-0 1-1 4-2 3-3 5-4 5-5 11-6 8-7 12-8 13-9 16-10 10-11 16-12 23-13 18-15 19-16 27-18 28-19 28-20 +0-0 1-1 2-2 2-3 0-4 3-5 5-6 7-7 8-8 9-9 10-10 11-11 10-12 12-13 14-14 11-15 15-16 15-17 16-18 17-19 +1-0 0-1 3-2 1-3 4-4 6-5 7-6 6-7 8-8 11-9 12-10 14-11 14-12 14-13 15-14 +1-0 1-1 2-2 0-3 1-4 10-5 8-6 6-7 13-8 7-9 11-10 12-11 12-12 14-13 +2-0 6-1 3-2 2-3 3-4 4-5 7-6 10-7 14-8 15-9 13-10 9-11 17-12 18-13 +0-1 7-2 4-3 5-4 5-5 3-6 7-7 6-8 12-9 10-10 10-11 11-12 9-13 13-14 14-15 15-16 20-17 17-18 22-21 16-23 28-24 23-25 27-26 28-27 25-28 23-29 27-30 27-31 29-32 +0-0 1-1 2-2 3-3 4-4 4-5 9-6 11-7 3-8 10-9 16-10 13-11 13-12 15-13 17-14 18-15 18-16 +0-0 1-1 2-2 2-3 4-4 5-5 5-6 6-7 8-8 8-9 10-10 9-11 12-12 11-13 14-14 16-15 15-16 17-17 +0-0 0-1 3-2 4-3 6-4 20-5 4-6 7-7 9-8 7-9 8-10 13-11 12-12 14-13 15-14 16-15 15-16 19-17 19-18 21-19 +0-0 3-1 1-2 5-3 3-4 5-5 6-6 6-7 9-8 10-9 11-10 14-11 7-12 16-13 15-14 16-17 19-18 21-19 17-20 22-21 22-22 22-23 +0-0 1-1 1-2 6-3 4-4 4-5 7-6 7-7 7-8 8-9 +0-0 0-1 2-2 5-3 5-4 7-5 8-6 9-7 10-8 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 6-11 21-12 13-13 16-15 17-16 18-17 21-18 22-19 +0-0 4-1 4-2 4-3 1-5 16-8 16-9 17-10 18-11 18-12 19-13 21-14 22-15 21-16 29-17 22-18 12-19 30-20 33-22 34-23 32-24 36-25 45-27 36-28 40-30 45-31 45-32 47-33 +1-0 1-1 3-2 7-3 5-4 9-5 9-6 10-7 11-8 14-10 15-11 16-12 +0-0 1-1 3-2 8-3 2-4 5-5 11-6 8-7 4-8 10-9 5-11 14-12 14-14 15-15 15-16 16-17 20-19 21-20 17-21 23-22 23-23 24-24 25-25 +2-0 3-1 2-2 2-3 0-4 6-5 8-6 8-7 11-8 9-9 13-10 +1-0 1-1 3-2 2-3 4-4 5-5 5-6 7-7 7-8 8-9 
+1-0 17-1 3-2 4-4 5-5 13-9 10-10 34-11 26-12 14-13 15-14 27-15 16-16 14-17 17-18 23-19 12-20 23-22 19-23 19-24 29-25 29-26 28-27 29-28 29-29 30-30 30-31 33-32 32-33 33-34 35-35 +0-0 1-1 3-2 4-3 +1-0 1-1 3-2 4-3 5-4 10-5 7-6 8-7 13-8 14-9 15-10 16-11 17-12 +0-0 0-1 3-3 4-4 9-5 1-7 2-8 10-9 13-10 13-11 13-12 18-13 18-14 17-15 20-16 6-17 31-19 23-20 25-21 26-22 22-23 25-24 26-25 32-26 28-27 25-28 24-29 36-30 +0-0 1-1 2-2 5-3 3-4 8-5 9-6 9-7 11-8 +0-0 1-1 2-2 3-3 3-4 4-6 5-7 7-8 6-9 10-10 8-11 12-12 13-13 14-15 15-16 16-17 16-18 17-19 18-20 19-21 23-22 21-23 22-24 26-26 20-27 27-28 +1-0 2-1 3-2 8-3 3-4 6-5 8-6 10-7 12-8 11-9 14-10 15-11 +0-0 2-1 1-2 6-3 3-4 5-5 5-6 5-7 7-8 +0-0 1-1 3-2 3-3 4-4 6-5 7-6 9-7 10-8 12-9 14-10 14-12 16-13 19-14 17-15 21-16 26-17 27-18 18-19 23-20 27-21 28-22 +0-0 0-1 1-2 4-3 7-4 7-5 7-6 +1-0 2-1 4-2 6-3 4-5 5-6 6-7 7-8 6-9 7-10 2-11 9-12 9-13 11-14 11-15 15-16 15-17 16-18 16-19 +1-0 10-1 3-2 4-3 16-4 7-5 8-6 8-7 10-8 10-9 5-10 7-11 7-12 16-13 17-14 +0-0 1-1 2-2 2-3 4-4 6-5 6-6 8-7 8-8 9-9 9-10 10-11 11-12 15-13 17-14 13-15 15-16 18-17 +0-0 1-1 3-2 2-3 5-4 5-5 6-6 6-7 8-8 9-9 9-10 10-11 12-12 13-13 14-14 +0-0 2-1 2-2 4-3 5-4 7-5 7-6 8-7 +0-0 1-1 4-2 7-3 7-4 +0-0 1-1 2-2 4-3 8-4 6-5 5-6 6-7 10-8 10-9 11-10 10-11 11-12 14-13 15-14 +1-0 1-1 4-2 3-3 3-4 3-5 13-7 10-8 9-9 16-10 11-11 15-13 14-14 15-15 19-16 19-17 19-18 5-19 22-20 22-21 +0-0 1-2 3-3 4-4 5-5 5-6 4-7 4-8 7-9 10-10 8-11 11-12 11-13 13-14 10-16 17-17 16-18 18-19 19-20 +0-0 1-1 2-2 2-3 3-4 5-5 5-6 6-7 +0-0 1-1 2-2 3-3 +0-0 1-1 3-2 2-3 3-4 3-5 6-6 9-7 7-8 7-9 7-10 8-11 10-12 13-13 14-14 12-15 5-16 6-17 15-18 16-19 +0-0 3-1 9-2 4-3 5-4 9-5 10-6 16-7 13-8 18-9 12-10 16-11 11-12 21-13 21-14 21-15 23-16 +0-0 2-1 1-2 2-3 2-4 1-5 8-6 2-7 3-8 7-9 4-10 8-11 9-12 11-13 11-14 12-15 11-16 12-17 12-18 14-19 15-20 16-21 +0-0 1-1 3-2 5-3 5-4 6-5 +0-0 1-1 2-2 2-3 3-4 5-5 5-6 6-7 +0-0 1-1 3-2 4-3 5-4 +0-0 1-1 3-2 3-3 5-4 5-5 5-6 16-7 8-8 8-9 10-10 11-11 12-12 12-13 12-14 14-15 14-16 17-17 17-18 +1-0 2-1 6-2 4-3 5-4 6-5 6-6 14-7 11-8 26-9 13-10 12-11 17-12 15-13 14-14 18-15 18-16 20-17 23-19 24-20 23-21 23-22 7-23 28-24 29-25 +1-0 1-1 5-2 3-3 3-4 4-5 5-6 2-7 6-8 2-9 8-10 9-11 9-12 11-13 +0-0 1-1 3-2 5-3 6-4 4-5 5-6 7-7 10-8 11-9 12-10 13-11 8-12 16-13 +0-0 1-1 2-2 4-3 4-4 6-5 6-6 +0-0 1-1 2-2 3-3 4-4 3-5 5-6 6-7 7-8 8-9 10-10 11-11 +1-0 1-1 3-2 0-3 2-4 6-5 4-6 8-7 8-8 7-9 10-10 10-11 11-12 12-13 13-14 14-15 15-16 19-18 +0-0 1-1 2-2 2-3 2-4 3-5 4-6 +1-0 1-1 1-2 2-3 2-4 3-5 3-6 4-7 5-8 6-9 +1-0 1-1 2-2 3-3 +0-0 1-1 0-2 3-3 6-4 8-6 12-7 13-8 +0-0 1-1 2-2 3-3 6-4 7-5 8-6 9-7 10-8 11-9 12-10 +0-0 2-1 4-2 5-3 5-4 8-5 +0-0 2-1 5-2 3-3 4-4 5-5 7-6 10-7 12-8 8-9 7-10 11-11 14-12 14-13 15-14 +0-0 1-1 9-2 2-3 14-4 3-5 7-6 7-7 19-8 8-9 9-10 12-11 21-12 22-13 18-14 16-15 17-16 15-17 16-18 20-19 21-20 23-21 21-22 25-23 26-24 25-25 28-26 28-27 +0-0 2-1 2-2 3-3 4-4 4-5 4-6 8-7 8-8 9-9 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 7-8 6-9 7-10 9-11 10-12 11-13 12-14 +0-0 7-1 8-2 3-3 10-4 11-5 8-6 7-7 4-8 6-9 10-10 10-11 13-12 12-13 14-14 +0-0 1-1 2-2 2-3 6-4 +1-0 1-1 3-2 7-3 5-4 6-5 6-6 12-7 2-8 11-9 13-10 13-11 5-12 6-13 17-14 18-15 +0-0 8-1 3-2 5-3 9-4 16-5 4-6 9-7 15-9 16-10 26-11 15-12 16-13 22-15 22-16 23-18 22-19 26-20 25-21 27-22 28-23 30-24 35-25 34-26 35-28 5-29 41-30 41-31 +3-0 4-1 3-2 5-3 3-4 2-5 8-6 4-7 4-8 11-9 12-10 13-11 17-12 15-13 15-14 19-15 19-16 18-17 21-18 22-19 +0-0 1-1 2-2 7-3 7-4 12-5 6-6 8-7 9-8 12-9 14-10 14-11 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 4-7 8-8 9-9 +0-0 1-1 2-2 3-3 4-4 +0-0 2-1 2-2 7-3 8-4 5-5 6-6 4-7 7-8 11-9 9-10 10-11 11-12 
12-13 +1-0 2-2 4-3 8-5 9-6 10-7 9-8 11-9 19-10 10-11 15-12 14-13 17-14 18-15 20-16 18-17 20-18 24-19 25-20 +0-0 2-1 3-2 4-3 +0-0 1-1 3-2 2-3 2-4 10-6 6-7 7-8 12-9 14-10 14-11 1-12 15-13 16-14 19-15 +1-0 2-1 2-2 2-3 4-4 4-5 7-6 6-7 9-8 9-9 10-10 11-11 +0-0 0-1 2-2 4-3 5-4 6-5 +1-0 1-1 1-2 8-3 4-4 5-5 7-6 8-7 9-8 7-9 7-10 1-11 10-12 10-13 16-14 17-15 +0-0 2-1 7-2 2-3 6-4 12-5 5-6 14-7 13-8 12-9 12-10 20-11 12-12 16-14 24-15 28-17 23-18 23-19 26-20 24-21 26-22 5-23 30-24 30-25 +0-0 0-1 1-2 2-3 3-4 5-5 4-6 6-7 7-8 13-9 7-10 8-11 8-12 9-13 10-14 12-15 8-16 11-17 14-18 15-19 +1-0 0-1 1-2 2-3 3-4 3-5 4-6 +0-0 1-1 2-2 6-3 3-4 8-5 5-6 5-7 5-8 8-9 9-10 +0-0 0-1 0-2 1-3 +0-0 0-1 3-2 4-3 3-4 4-5 6-6 7-7 7-8 8-9 4-10 10-11 11-12 11-13 5-14 13-15 14-16 +0-0 3-1 25-2 27-3 3-4 6-5 3-6 30-7 12-9 13-10 9-11 13-12 17-13 13-14 19-15 16-16 22-17 22-18 25-19 24-20 26-22 23-23 31-24 31-25 +1-0 2-1 1-2 1-3 2-4 2-5 3-6 4-7 4-8 4-9 7-10 6-11 5-12 7-13 8-14 8-15 8-16 10-17 10-18 10-19 11-20 +0-0 1-1 3-2 4-3 5-4 4-5 8-6 6-7 7-8 8-9 8-10 9-11 10-12 11-13 +0-0 1-1 11-2 3-3 4-4 3-5 8-6 5-7 4-8 5-9 6-10 7-11 8-12 8-13 12-14 11-15 12-16 12-17 13-18 +0-0 2-1 3-2 +0-0 1-1 2-2 4-3 5-4 5-5 5-6 6-7 7-8 +0-0 8-1 2-2 10-3 5-4 13-5 3-6 9-7 11-8 10-9 10-10 12-11 5-12 15-13 16-14 +0-0 7-1 3-2 9-3 4-4 4-5 1-6 11-7 16-8 12-9 13-10 15-11 17-12 18-13 +0-0 1-1 1-2 3-3 4-4 7-5 8-6 7-7 9-8 10-9 11-10 13-11 12-12 14-13 15-14 +0-0 1-1 2-2 3-3 4-4 5-5 +1-0 1-1 1-2 2-3 3-4 4-5 5-6 6-7 7-8 +0-0 1-1 3-2 2-3 1-4 7-5 8-6 5-7 7-8 10-9 11-10 12-11 4-12 13-13 +0-0 3-1 5-2 2-3 6-4 7-5 8-6 +0-0 2-1 4-2 3-3 3-4 9-5 10-6 7-7 12-8 6-9 7-10 12-11 13-12 +0-0 8-1 9-2 5-3 19-4 4-5 13-6 10-7 12-8 11-9 11-10 20-11 21-12 14-13 24-14 12-15 19-16 23-17 22-18 25-19 24-20 5-21 17-22 28-23 +0-0 1-1 2-2 2-3 4-4 5-5 7-6 9-7 9-8 11-9 8-10 12-11 12-12 15-13 14-14 15-15 16-16 +0-0 1-1 2-2 8-3 4-4 4-5 6-6 6-7 8-8 9-9 10-10 +0-0 2-1 8-2 6-3 6-4 8-5 +0-0 1-1 2-2 4-3 4-4 4-5 7-6 9-7 8-8 8-9 7-10 13-11 9-12 10-13 11-14 11-15 17-16 7-17 8-18 19-19 20-20 +1-0 9-1 3-2 5-3 4-4 5-5 9-6 9-7 11-8 10-9 12-10 15-11 5-12 13-13 19-15 19-16 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 11-9 10-10 12-11 12-12 15-13 16-14 15-15 17-16 +0-0 3-1 3-2 2-3 3-4 3-5 6-6 6-7 9-9 10-11 13-12 11-13 14-14 15-15 +0-0 1-1 2-2 3-3 4-4 4-5 5-6 6-7 7-8 +0-0 1-1 2-2 2-3 4-4 +0-0 1-1 3-2 3-3 3-4 5-5 5-6 15-7 5-8 8-9 10-10 9-11 13-12 14-13 10-14 11-15 12-16 12-17 16-18 8-19 9-20 18-21 19-22 +1-0 12-1 3-2 17-3 3-4 4-5 14-6 18-7 10-8 20-9 12-10 13-11 15-12 5-14 17-15 7-16 23-17 23-18 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 7-7 10-8 8-9 8-10 11-11 11-12 10-13 13-14 13-15 15-16 16-17 17-18 +0-0 2-1 4-2 4-3 +0-0 1-1 3-2 7-3 5-4 5-5 5-6 9-7 10-8 11-9 10-10 12-11 13-12 +0-0 2-1 2-2 2-3 6-4 5-5 5-6 8-7 9-8 7-9 9-10 3-11 11-12 12-13 13-14 +2-0 2-1 0-2 2-3 4-4 2-5 4-6 7-8 7-9 6-10 8-11 11-12 12-13 12-14 13-15 9-16 7-17 14-18 +0-0 1-1 1-2 1-3 2-4 2-5 3-6 3-7 4-8 5-9 5-10 6-11 +0-0 1-1 2-2 3-3 4-4 5-5 8-6 8-7 9-8 +0-0 3-1 4-2 5-3 6-4 +3-0 4-1 1-2 6-3 7-4 8-5 4-6 7-7 7-8 11-9 7-10 11-11 9-12 1-13 10-14 12-16 14-17 14-19 15-20 15-21 12-22 16-23 17-24 +1-0 1-1 1-2 5-3 6-4 10-5 8-6 8-7 7-8 10-9 7-10 15-11 11-12 12-13 15-14 14-15 13-16 14-17 14-18 16-19 18-20 22-21 21-22 19-23 24-24 22-25 24-26 25-27 +0-0 2-1 4-2 5-3 7-4 7-5 10-6 11-7 12-8 13-9 14-10 15-11 +0-0 2-1 2-2 6-3 9-4 10-5 10-6 12-7 13-8 13-9 12-10 16-11 17-12 15-13 21-14 21-15 20-16 22-17 +0-0 1-1 2-2 4-3 5-5 7-6 7-7 7-8 8-9 9-10 12-11 15-12 12-13 11-14 11-15 16-16 +0-0 1-1 3-2 5-3 +3-0 2-1 2-2 4-3 5-4 7-5 9-6 10-7 12-8 12-10 14-11 17-12 17-13 18-14 +7-0 8-1 2-2 3-3 8-4 7-5 13-6 8-7 
7-8 11-9 12-10 12-11 19-12 17-13 23-14 18-15 19-16 19-17 21-18 5-19 24-20 24-21 +1-0 1-1 1-3 2-4 2-5 4-6 5-7 7-8 6-9 9-10 9-11 8-12 13-13 11-14 12-15 11-17 13-18 14-19 +0-0 0-1 1-2 1-3 6-4 8-5 4-6 6-7 5-8 6-9 7-10 9-12 7-13 10-14 +0-0 2-1 5-2 5-3 7-4 +0-0 3-1 4-2 4-3 4-4 1-5 7-6 7-7 16-8 16-10 11-11 13-12 12-13 13-14 10-15 17-16 17-17 +1-0 1-1 1-2 5-3 6-4 1-5 7-6 8-7 3-8 10-10 11-11 12-12 13-13 13-14 14-16 23-17 16-19 17-20 18-21 20-22 21-23 18-24 20-25 22-26 22-27 24-28 25-29 25-30 27-31 +0-0 1-1 1-2 4-3 5-4 4-5 6-6 7-7 12-8 9-9 11-10 11-11 10-12 14-13 7-14 16-15 +0-0 1-1 2-2 3-3 4-4 +0-0 1-1 2-2 2-3 5-5 7-6 4-7 7-8 8-9 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 8-7 +0-0 1-1 2-2 3-3 6-4 6-5 9-6 9-7 11-8 10-9 13-10 13-11 13-12 15-13 15-14 17-15 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 6-8 8-9 9-10 15-11 11-12 12-13 14-14 16-16 16-17 14-18 18-19 +1-0 3-1 4-2 5-3 6-4 +1-0 1-1 1-2 3-3 5-4 6-5 9-7 8-8 7-9 8-10 9-11 12-12 11-13 15-14 15-15 +0-0 2-1 2-2 4-3 5-4 8-5 8-6 8-7 9-8 +0-0 0-1 1-2 2-3 4-4 5-5 6-6 7-7 8-8 8-9 8-10 9-11 11-12 12-13 +0-0 2-1 4-2 4-3 4-4 4-5 6-6 8-7 9-8 10-9 +1-0 2-1 3-2 4-3 6-4 6-5 +1-0 5-1 7-2 4-3 7-4 8-5 7-6 9-7 14-8 15-9 12-10 13-11 13-12 16-13 +1-0 1-1 1-2 5-3 6-4 8-5 10-6 12-7 11-8 15-9 12-10 14-11 16-12 19-13 20-14 20-15 21-16 +1-0 7-1 3-2 4-3 5-4 4-5 6-6 8-7 13-8 10-9 11-10 14-12 15-13 16-15 +0-0 2-1 2-2 4-3 5-4 6-5 7-6 7-7 8-8 10-9 13-10 16-11 14-12 15-13 11-14 17-15 +2-0 2-1 3-2 9-3 10-4 5-5 8-6 9-7 10-8 11-9 11-10 15-11 +1-0 2-1 4-2 6-3 4-4 6-5 8-6 13-7 12-8 14-9 15-10 16-11 18-12 20-13 +0-0 0-1 4-2 3-3 6-4 5-5 6-6 7-7 +0-0 1-1 2-2 1-3 3-4 6-5 5-6 9-7 6-8 5-9 10-10 10-11 11-12 12-13 +0-0 1-1 2-2 4-3 4-4 +0-0 0-1 6-2 2-3 4-4 10-5 5-6 7-7 11-8 9-9 11-10 13-11 13-12 14-13 +1-0 1-1 1-2 2-3 7-4 5-5 8-6 11-7 10-8 10-9 17-10 11-11 20-12 15-13 15-14 16-15 17-16 21-17 21-18 22-19 25-20 25-21 27-22 26-23 26-24 28-25 29-26 31-27 32-28 30-29 30-30 23-31 34-32 +2-0 3-1 1-2 1-3 4-4 5-5 6-6 7-7 6-8 2-9 10-10 9-12 11-13 12-14 12-15 13-16 14-17 16-18 16-19 17-20 18-21 19-22 +0-0 2-1 3-2 4-3 4-4 6-5 8-6 6-7 10-8 10-9 11-10 13-11 9-12 13-13 14-14 +0-0 1-1 2-2 3-3 3-4 5-5 5-6 5-7 6-8 +0-0 0-1 2-2 4-3 5-4 6-5 6-6 +1-0 7-1 3-2 4-3 4-4 4-5 6-6 8-7 11-8 2-9 13-10 13-11 8-12 10-13 19-14 18-15 19-16 19-17 21-18 +1-0 1-1 1-2 5-3 7-4 5-5 8-6 16-7 9-8 10-9 13-10 21-11 16-12 15-13 21-14 22-15 20-16 23-17 23-18 24-19 26-20 28-21 28-22 30-23 30-24 25-25 32-26 +0-0 2-1 1-2 1-3 3-4 4-5 5-6 6-7 6-8 6-9 7-10 7-11 9-13 10-14 11-15 12-16 +0-0 2-1 4-2 5-3 5-4 6-7 9-8 10-9 15-10 13-11 17-12 12-13 18-14 +0-0 1-1 3-2 4-3 5-4 6-5 +0-0 1-1 5-2 5-3 +0-0 0-1 2-2 2-3 4-4 5-5 5-6 4-7 7-8 8-9 10-10 11-11 12-12 11-13 6-14 15-15 16-16 +1-0 1-1 1-2 5-3 6-4 10-5 7-6 8-7 10-8 11-9 12-10 13-11 15-12 14-13 14-14 15-16 18-17 19-18 19-19 21-20 23-21 24-22 25-23 25-24 28-25 28-26 29-27 29-28 30-29 +0-0 2-1 2-2 4-3 8-4 10-5 6-6 10-7 11-8 +0-0 1-1 2-2 4-3 4-4 5-5 6-6 +0-0 1-1 2-2 6-3 8-5 12-6 10-7 12-8 15-9 14-10 16-11 17-12 +0-0 10-1 8-2 6-4 6-5 18-6 10-7 8-8 13-9 13-10 12-11 13-12 15-13 20-14 17-15 20-16 5-17 21-18 7-19 24-20 24-21 +1-0 1-1 2-2 4-4 7-5 7-6 8-7 9-8 12-9 12-10 13-11 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 6-7 +0-0 3-1 3-2 5-3 1-4 2-5 6-7 4-8 7-9 6-10 7-11 9-12 9-13 10-14 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 +1-0 3-1 2-2 4-3 4-4 2-5 3-6 7-7 9-8 10-9 8-10 5-11 8-12 12-13 8-14 4-15 16-16 16-17 18-18 19-19 19-20 20-21 21-22 +0-0 0-1 1-2 1-3 4-4 3-5 3-6 5-7 5-8 +1-0 5-1 3-2 7-3 5-4 4-5 10-6 10-7 10-8 9-9 11-11 14-12 10-13 13-14 15-15 20-18 19-20 24-21 22-22 39-23 10-24 24-25 27-27 27-28 31-29 32-30 30-31 34-32 23-33 32-34 36-35 28-36 29-37 30-38 38-39 
40-40 +1-0 1-1 1-2 3-3 4-4 3-5 6-6 7-7 +0-0 4-1 6-2 7-3 8-4 5-5 12-6 10-7 14-8 15-9 13-10 13-11 16-12 20-13 21-14 22-15 21-16 19-17 22-18 25-19 20-20 26-21 29-22 28-23 31-24 31-25 32-26 33-27 +0-0 0-1 9-2 4-3 4-4 1-5 5-6 7-7 7-8 10-9 11-10 +0-0 1-1 3-2 2-3 4-4 5-5 6-6 6-7 +0-0 2-1 4-2 6-3 4-4 10-5 9-6 21-7 23-8 11-9 10-10 18-11 12-12 8-13 15-14 14-15 15-16 16-17 19-18 20-19 19-20 13-21 18-22 24-23 +1-0 1-1 1-2 7-3 8-4 9-5 10-6 9-7 12-8 9-9 11-10 12-11 15-12 14-13 16-14 18-15 5-17 19-18 21-19 +0-0 1-1 2-2 0-3 6-4 6-5 10-6 9-8 12-9 16-11 15-12 18-13 19-15 16-16 17-17 28-18 26-19 30-20 32-21 33-22 28-23 34-24 +0-0 1-1 1-2 2-4 2-5 5-6 4-7 7-8 7-9 7-10 8-11 +0-0 9-1 1-2 5-3 3-4 6-5 8-6 9-7 11-9 12-10 15-11 16-12 17-13 18-14 19-15 19-16 20-17 +0-0 1-1 0-2 4-3 5-4 6-5 7-6 6-7 9-8 8-9 11-10 12-11 11-12 13-13 14-14 +1-0 1-1 0-2 18-3 14-4 15-5 9-6 15-7 12-8 15-9 13-10 16-12 19-13 +0-0 1-1 4-2 5-3 6-4 7-5 8-6 9-7 10-8 11-9 17-10 16-11 15-12 17-13 18-14 +0-0 1-1 2-2 4-3 4-4 10-5 6-6 6-7 7-8 8-9 10-10 11-11 +0-0 3-1 3-2 3-3 4-4 6-5 5-6 6-7 7-8 8-9 10-10 11-11 +0-0 0-1 2-2 3-3 2-4 4-5 5-6 5-7 6-8 7-9 8-10 13-11 9-12 10-13 10-14 12-15 11-16 12-17 14-18 15-19 +0-0 1-1 2-2 6-3 7-4 3-5 4-6 7-7 8-8 +0-0 2-1 3-2 1-3 6-4 5-5 5-6 7-7 9-8 8-9 10-10 10-11 12-12 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 5-7 6-8 6-9 9-10 7-11 10-12 11-13 10-14 11-15 12-16 13-17 +0-0 1-1 3-2 4-3 5-4 +0-0 4-1 3-2 10-3 0-4 1-5 6-6 7-7 9-8 11-9 12-10 11-11 15-13 18-14 18-15 22-16 17-17 21-18 24-19 25-21 25-22 24-23 26-24 27-25 28-26 32-27 31-28 30-29 34-30 34-31 27-32 36-33 38-34 40-36 +0-0 1-1 2-2 1-3 3-4 3-5 4-6 4-7 5-8 6-9 7-10 8-11 8-12 9-13 9-14 10-15 12-16 11-17 10-18 13-19 13-20 14-21 +0-0 2-1 6-2 7-3 10-4 12-5 8-8 9-9 14-10 18-13 20-14 21-15 23-16 24-17 16-18 26-20 23-21 26-22 29-23 18-24 31-25 32-26 33-27 29-28 33-29 43-30 35-31 12-32 39-33 40-34 41-35 48-36 40-37 37-38 46-39 51-40 46-41 24-42 50-43 50-44 44-45 53-46 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 9-8 10-9 11-10 13-11 14-12 15-13 17-14 16-15 18-16 19-17 +1-0 2-1 6-2 4-3 8-4 3-5 9-6 9-7 11-8 14-9 15-10 17-11 18-12 19-13 20-14 21-15 22-16 24-17 22-18 23-19 +0-0 1-1 2-2 2-3 6-4 4-5 5-6 7-7 7-8 8-9 10-10 10-11 11-12 0-13 11-14 15-16 16-17 17-18 17-19 19-20 20-21 21-22 22-23 23-24 24-25 25-26 +0-0 0-1 4-2 5-3 6-4 7-5 8-6 9-7 8-8 12-9 11-10 10-11 14-12 13-13 15-14 16-15 17-16 18-17 19-18 22-19 20-20 21-21 24-22 23-23 25-24 20-25 26-26 28-27 29-28 30-29 31-30 32-31 37-32 34-33 35-34 37-35 36-36 +0-0 2-1 3-2 5-3 6-4 9-6 7-7 12-8 13-9 14-10 15-11 16-12 17-13 18-14 19-15 20-16 22-17 24-18 24-19 25-20 19-21 8-22 28-23 29-24 28-25 31-26 30-27 29-28 29-29 33-30 35-31 36-32 37-33 38-34 40-35 41-36 42-37 43-38 +1-0 3-1 4-2 5-3 6-4 7-5 8-6 +1-0 3-1 3-2 4-3 7-4 9-5 9-6 11-7 3-8 13-9 14-10 15-11 16-12 +0-0 1-1 1-2 5-3 4-5 10-7 8-8 9-9 10-10 13-11 19-12 14-13 14-14 18-15 18-16 20-17 20-18 22-19 +1-0 3-1 3-2 7-3 7-4 6-5 8-6 10-7 11-8 +0-0 3-1 3-2 6-3 7-4 8-5 6-6 9-7 10-8 13-9 13-10 15-11 16-12 17-13 19-14 19-15 18-16 22-17 23-18 +0-0 2-1 3-2 5-3 6-4 7-5 8-7 9-8 10-9 11-10 14-11 15-12 16-13 17-14 18-15 19-16 20-17 22-18 23-19 24-20 25-21 27-22 28-23 +0-0 1-1 0-2 1-3 2-4 3-5 5-6 8-7 9-8 9-9 11-10 11-11 14-12 12-13 13-14 12-15 16-16 19-17 18-18 19-19 20-20 21-21 22-22 23-23 +0-0 1-1 6-2 7-3 4-4 5-5 4-6 9-7 4-8 10-9 +1-0 1-1 1-2 4-3 5-4 2-5 3-6 6-7 7-8 8-9 10-10 6-11 11-12 12-13 13-14 14-15 16-16 16-17 15-18 17-19 18-20 +1-0 1-1 0-2 5-3 3-4 1-5 11-6 3-7 13-8 8-9 6-10 16-11 18-13 11-14 15-15 12-16 14-17 9-18 15-19 14-20 14-21 17-22 18-23 22-25 21-26 23-27 22-28 21-29 23-30 
24-31 25-32 +0-0 2-1 4-2 5-3 +0-0 0-1 3-2 4-3 4-4 6-5 7-6 9-7 10-8 11-9 14-10 15-11 14-12 16-13 +0-0 1-1 3-2 5-3 4-4 4-5 9-6 7-7 9-8 9-9 10-10 12-11 15-12 17-13 18-14 17-15 18-16 20-17 18-18 21-19 22-20 23-21 23-22 24-23 26-24 24-25 29-26 29-27 31-28 33-29 32-30 34-31 35-32 36-33 +0-0 2-1 3-2 5-3 6-4 5-5 8-6 8-7 9-8 10-9 11-10 12-11 13-12 14-13 15-14 16-15 17-16 18-17 19-18 20-19 21-20 22-21 23-22 24-23 25-24 26-25 27-26 30-27 31-28 +1-0 1-1 2-2 3-3 6-4 7-5 8-6 9-7 10-8 11-9 12-10 13-11 13-12 14-13 +1-0 1-1 5-2 4-3 8-4 7-5 6-6 10-7 11-8 11-9 12-10 14-11 15-12 16-13 18-14 19-15 16-16 26-17 20-18 25-19 25-20 23-21 27-22 +0-0 1-1 2-2 4-3 4-4 5-5 6-6 7-7 8-8 9-9 +0-0 2-1 6-2 4-3 3-4 4-5 8-6 8-7 9-8 10-9 11-10 12-11 10-12 14-13 15-14 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 8-7 9-8 11-9 12-10 13-11 15-12 16-13 +0-0 1-1 3-2 5-3 5-4 7-5 8-6 9-7 +0-0 1-1 2-2 3-3 4-4 5-5 4-6 6-7 6-8 25-9 9-11 10-12 10-13 12-15 15-16 15-17 16-18 20-19 19-20 19-21 25-22 19-23 19-24 19-25 19-26 20-27 23-28 25-29 26-30 27-31 25-32 25-33 28-34 31-37 33-40 28-41 34-42 35-44 36-45 +0-0 0-1 2-2 2-3 3-4 4-5 5-6 6-7 7-8 8-9 10-10 9-11 13-12 12-13 13-14 10-15 15-16 16-17 16-18 17-19 17-20 19-21 21-22 21-23 22-24 23-25 +0-0 1-1 1-2 3-3 4-4 5-5 6-6 7-7 +0-0 1-1 3-3 4-4 5-5 7-7 8-8 15-9 10-10 12-13 13-14 14-15 15-16 15-17 20-18 16-19 20-21 21-22 19-23 22-24 27-25 26-26 23-27 28-30 31-31 29-33 28-34 39-35 34-36 34-37 43-39 34-40 37-41 37-42 35-43 37-44 37-45 35-46 48-47 31-48 44-49 47-50 47-51 48-52 52-53 49-54 49-55 51-56 28-57 57-58 53-59 52-60 57-61 57-62 57-63 28-64 59-65 59-66 61-67 66-68 63-69 66-72 66-73 66-74 66-75 70-76 71-77 +0-0 0-1 1-2 3-3 4-4 5-5 5-6 6-7 9-8 10-9 12-10 14-11 13-13 16-14 18-15 16-16 17-17 19-18 16-19 20-20 +0-0 1-1 2-2 8-3 6-4 5-6 5-7 5-9 10-10 11-11 11-13 16-14 14-15 18-16 16-17 17-18 17-19 18-20 22-21 24-22 22-23 20-24 27-25 26-26 29-27 28-28 30-29 +0-0 0-1 3-2 2-3 2-4 4-5 5-6 6-7 7-8 8-9 9-10 10-11 9-12 11-14 12-15 12-16 13-17 14-18 +0-0 1-1 1-2 3-3 4-4 5-5 6-6 9-7 8-9 9-10 9-11 10-12 11-13 12-14 13-15 14-16 15-17 16-18 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 10-9 9-10 13-11 12-12 14-13 15-14 16-15 17-16 18-17 18-18 19-19 20-20 21-21 +0-0 1-1 2-2 3-3 4-4 9-5 5-6 5-7 6-8 8-9 9-10 10-11 11-12 12-13 13-14 15-15 17-17 16-18 8-19 18-20 8-21 19-22 19-23 22-24 21-25 24-26 25-27 27-28 26-29 27-30 29-31 29-32 30-33 33-34 33-35 31-36 35-37 36-38 37-39 34-40 38-41 39-42 +0-0 1-1 0-2 2-3 3-4 4-5 5-6 4-7 5-9 5-10 9-11 9-12 10-13 11-14 12-15 13-16 +1-0 1-1 3-2 4-3 5-4 5-5 6-6 7-7 8-8 8-9 10-10 10-11 11-12 16-14 14-15 19-16 18-17 18-18 23-19 19-20 18-21 23-22 24-23 +0-0 1-1 3-2 0-3 5-4 7-5 10-6 12-7 9-9 13-10 8-11 16-12 11-13 18-14 15-15 20-16 19-17 19-18 23-19 21-20 21-21 17-22 25-23 26-24 27-25 30-26 33-27 32-28 32-29 30-30 33-31 34-32 33-33 36-34 35-35 37-36 +0-0 1-1 2-2 2-3 4-4 5-5 13-6 2-7 6-8 7-9 8-10 8-11 10-12 11-13 12-14 11-15 11-16 12-17 7-19 15-20 16-21 9-22 16-23 17-24 17-25 18-26 19-27 +0-0 1-1 2-2 3-3 3-4 5-5 6-6 7-7 8-8 14-9 11-10 11-11 13-12 12-13 16-14 15-15 18-16 17-17 18-18 19-19 20-20 22-21 23-22 24-23 20-24 25-25 +0-0 1-1 2-2 3-3 4-4 6-5 5-6 7-7 8-10 8-12 8-13 9-14 10-15 12-16 11-17 14-18 15-19 16-20 18-22 18-23 18-24 19-25 20-26 22-28 23-29 24-30 26-31 25-32 +0-0 0-1 1-2 0-3 2-4 3-5 4-6 4-7 5-8 6-9 9-10 10-12 10-13 11-14 +0-0 1-1 2-2 3-3 7-4 5-5 5-6 8-7 8-8 14-10 12-11 13-12 18-13 16-14 18-15 23-16 22-17 23-18 23-19 23-20 24-21 28-22 27-23 30-24 29-25 32-26 32-27 39-28 36-29 39-30 40-31 40-32 42-33 43-34 44-35 45-36 +0-0 1-1 3-2 5-3 4-4 6-5 7-6 8-7 +0-0 1-1 2-2 3-3 5-4 6-5 8-6 7-9 9-10 10-11 11-12 12-13 
14-14 14-15 15-16 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 +0-0 1-1 2-2 4-3 3-4 5-5 5-6 13-7 9-9 10-10 13-11 16-12 24-13 17-14 15-15 17-16 17-17 18-18 19-19 21-20 24-21 25-22 22-23 26-24 27-25 +0-0 2-1 2-2 3-3 5-4 6-5 7-6 8-7 +0-0 0-1 3-2 4-3 5-4 5-5 6-6 7-7 8-8 9-9 11-10 12-11 13-12 +1-0 4-1 3-2 3-3 3-4 4-5 3-6 4-7 7-8 6-9 8-10 9-11 9-12 12-13 12-16 14-17 14-18 16-19 11-20 16-21 17-22 +0-0 1-1 2-2 4-3 4-4 6-5 9-6 9-7 10-8 11-9 20-10 10-11 25-12 13-13 14-14 17-15 15-16 19-17 22-18 23-19 24-20 24-21 25-22 +2-0 2-1 3-2 4-3 5-4 7-5 7-6 9-7 9-8 10-9 4-10 12-11 14-12 13-14 16-15 20-16 15-17 22-18 19-19 22-20 24-21 +1-0 0-1 5-2 8-3 9-4 11-5 5-6 6-7 5-8 15-11 10-12 10-13 12-14 12-15 20-16 14-17 13-18 17-19 21-20 18-22 22-23 25-24 22-25 23-26 26-27 24-28 24-29 26-30 27-31 28-33 29-34 30-35 31-36 +0-0 1-1 4-2 3-3 4-4 5-5 6-6 9-7 10-8 9-9 12-10 7-11 8-12 12-13 13-14 14-15 15-16 16-17 17-18 18-19 18-20 19-21 +0-0 1-1 2-2 2-3 3-4 3-5 5-6 6-7 5-8 5-9 6-10 7-11 8-12 9-13 10-14 11-15 12-16 +0-0 3-1 4-2 4-3 6-4 7-5 7-6 14-7 7-8 12-9 8-10 14-11 15-12 +0-0 1-1 1-2 2-3 3-4 3-5 5-6 7-7 8-8 6-9 8-10 9-11 4-12 8-13 10-14 12-15 13-16 11-17 13-18 13-19 15-20 16-21 17-22 18-23 18-24 19-25 21-26 22-27 18-28 19-29 23-30 +0-0 1-1 3-2 4-3 6-4 9-5 9-6 11-7 13-8 15-9 16-10 16-11 17-12 +0-0 1-1 3-2 4-3 4-4 6-5 6-6 7-7 7-8 11-9 8-10 12-11 13-12 15-13 14-14 16-15 17-16 16-17 20-18 19-19 20-20 21-21 20-22 22-23 23-24 23-25 26-26 31-27 27-28 29-29 30-30 32-31 30-32 34-33 33-34 +0-0 1-1 3-2 4-3 4-4 7-5 8-6 8-7 7-8 15-9 13-10 15-11 16-12 13-13 18-14 18-15 20-16 22-17 23-18 24-19 19-20 26-21 27-22 27-23 30-24 31-25 33-26 35-27 37-29 38-30 +0-0 1-1 2-2 4-3 4-4 5-5 4-6 7-7 8-8 10-10 11-11 12-12 15-13 14-14 13-15 18-16 18-17 19-18 19-19 21-20 22-21 23-22 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 9-10 11-11 11-12 10-13 12-14 +0-0 2-1 1-2 5-3 4-4 6-5 9-6 8-7 10-8 9-9 12-10 11-11 10-12 14-13 15-14 16-15 17-16 18-17 18-18 24-21 22-23 25-25 26-27 29-28 +5-0 1-1 1-2 4-3 2-4 2-5 1-6 6-7 3-8 4-9 10-10 10-11 11-12 10-13 12-14 6-15 13-16 13-17 15-19 16-20 18-21 19-22 20-23 20-24 22-25 23-26 25-28 26-29 27-30 28-31 28-32 +0-0 1-1 2-2 2-3 2-4 5-5 5-6 13-8 3-9 4-10 10-11 17-12 12-13 13-14 16-15 9-16 6-17 7-18 17-19 21-20 21-21 19-22 23-23 24-24 18-25 19-26 30-27 25-28 21-29 27-30 25-31 29-32 28-33 29-34 31-35 +0-0 1-1 2-2 3-3 5-4 6-5 4-6 5-7 7-8 7-9 10-10 10-11 9-12 10-13 11-14 12-15 14-16 16-17 16-18 18-19 17-20 20-21 19-22 21-23 +0-0 4-1 3-2 7-3 8-4 5-5 10-7 9-8 10-9 11-10 12-11 13-12 14-13 16-14 17-15 18-16 18-17 20-18 20-19 20-20 20-21 22-22 23-23 28-24 28-25 +0-0 1-1 2-2 2-3 3-4 3-5 4-6 6-7 7-8 6-9 7-10 8-11 9-12 10-13 11-14 12-15 13-16 11-17 14-18 +0-0 1-1 1-2 4-3 6-4 10-5 5-6 5-7 6-8 7-9 8-10 11-11 13-12 12-13 14-14 15-15 16-16 18-17 19-18 19-19 20-20 21-21 20-22 21-23 22-24 +2-0 10-1 11-2 2-3 4-5 5-6 6-7 7-8 7-9 7-10 7-11 8-12 11-14 9-15 9-16 15-17 16-18 17-19 17-20 12-21 19-22 18-23 16-24 18-25 20-26 17-27 20-28 21-29 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 4-7 7-9 12-10 8-11 9-12 11-13 11-14 10-15 14-16 13-17 14-19 17-20 19-21 20-23 20-24 +0-0 0-1 5-2 4-3 4-4 3-5 6-6 7-7 8-8 +0-0 1-1 2-2 3-3 9-4 6-5 6-6 8-7 11-8 8-9 12-10 11-11 12-12 10-13 15-14 16-15 16-16 17-17 +0-0 1-1 2-2 4-3 4-4 4-5 4-6 5-7 7-8 8-9 11-10 13-11 11-12 10-13 15-14 17-15 13-16 16-17 28-18 19-19 21-20 21-21 20-22 22-23 21-24 22-25 27-26 20-27 25-29 23-30 28-31 +0-0 1-1 2-2 2-3 2-4 4-5 6-6 8-7 7-8 8-9 11-10 13-11 14-12 13-13 12-14 18-15 17-16 18-17 19-18 22-19 +0-0 1-1 2-2 2-3 6-5 6-6 5-7 6-8 8-10 9-11 12-12 13-13 9-14 10-15 15-16 14-17 15-18 14-19 15-20 19-21 20-22 21-23 24-24 23-25 26-26 
25-27 26-28 32-29 29-32 30-33 33-34 29-35 34-36 35-37 36-38 34-39 37-40 37-41 40-42 +0-0 1-1 2-2 2-3 3-4 3-5 4-6 8-7 5-8 6-9 4-10 8-11 7-12 18-13 11-14 12-15 12-16 14-17 15-18 16-20 17-21 15-22 18-23 +0-0 1-1 3-2 4-3 7-4 6-5 10-6 16-7 11-8 13-9 13-10 11-11 15-12 17-13 19-14 21-15 20-16 22-17 +0-0 3-1 3-2 5-3 6-4 7-5 8-6 10-7 11-8 11-9 12-10 14-11 +0-0 1-1 2-2 3-3 5-4 6-5 8-6 9-7 10-8 10-10 14-11 11-12 15-13 +0-0 1-1 2-2 3-3 4-4 4-5 5-6 6-7 7-8 10-9 8-10 9-11 11-12 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 8-7 9-8 10-9 +0-0 1-1 14-2 15-3 3-4 2-5 7-6 8-7 9-8 10-9 11-10 11-11 1-12 12-13 6-14 14-15 16-16 +0-0 1-1 3-2 4-3 6-4 11-6 8-7 8-8 7-9 11-10 13-11 +0-0 1-1 2-2 2-3 4-4 4-5 5-6 8-7 10-8 7-9 10-10 11-11 +0-0 2-1 0-2 3-3 3-4 4-5 4-6 4-7 14-8 15-9 7-11 6-12 10-13 10-14 9-15 11-16 12-17 15-19 13-20 16-21 +0-0 1-1 2-2 4-3 4-4 6-5 7-6 8-7 9-8 12-9 11-10 10-11 13-12 14-13 15-14 17-15 18-16 18-17 19-18 21-19 22-20 23-21 +0-0 1-1 2-2 3-3 5-4 6-5 9-6 7-7 11-8 12-9 13-10 14-11 +0-0 1-1 1-2 7-3 4-4 7-5 0-6 1-7 7-8 8-9 9-10 8-11 12-12 11-13 11-14 13-15 14-16 15-17 16-18 +3-1 20-2 6-3 5-6 6-8 8-9 10-10 1-11 2-12 10-13 4-14 10-16 16-17 9-18 18-19 17-20 21-21 13-22 24-23 15-24 24-25 26-27 +0-0 2-1 0-2 6-3 3-4 4-5 5-6 10-7 11-8 13-9 7-10 15-11 11-12 11-13 17-14 18-15 14-16 16-17 17-18 17-20 19-21 21-23 22-24 23-25 +0-0 2-1 2-2 3-3 3-4 6-5 4-6 1-7 8-8 14-9 9-10 5-11 9-12 10-13 9-14 12-15 14-16 14-17 15-18 16-19 15-20 16-21 17-22 +0-0 1-1 2-2 3-3 3-4 3-5 6-6 4-7 5-8 6-9 7-10 8-11 10-12 8-13 13-14 12-15 14-16 15-18 15-19 17-20 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 7-7 8-8 +0-0 1-1 3-2 3-3 5-4 7-5 6-6 10-8 11-9 13-10 9-11 14-12 +2-0 1-1 2-2 4-3 6-4 7-5 6-6 7-7 9-8 10-9 12-10 13-11 14-12 16-13 17-14 20-15 21-16 22-17 25-19 26-20 27-21 30-22 29-23 31-24 33-25 34-26 +0-0 1-1 2-2 5-3 6-4 7-5 4-6 9-7 12-8 13-9 +2-0 2-1 3-2 4-3 5-4 6-5 7-6 8-7 10-8 10-9 11-10 +0-0 1-1 1-2 1-3 3-4 4-5 5-6 5-7 6-8 8-9 8-10 +0-0 1-1 7-2 8-3 3-4 4-5 2-6 7-7 8-8 12-9 13-10 10-11 9-13 10-14 8-15 15-17 14-18 15-19 17-20 18-21 20-22 20-23 15-24 22-25 21-26 29-27 23-28 24-29 23-30 31-31 26-32 33-33 34-34 30-35 35-36 29-37 30-38 38-39 39-40 35-41 36-42 36-43 27-45 37-46 36-47 40-48 +0-0 1-1 2-2 3-3 4-4 5-5 8-6 8-7 4-8 9-9 10-10 11-11 10-12 14-13 11-14 16-15 17-16 18-17 15-18 15-19 22-20 18-21 19-22 19-23 19-24 21-25 20-26 23-27 23-28 +2-0 4-1 5-2 6-3 8-4 9-5 10-6 5-7 11-8 23-11 16-14 11-15 15-16 14-17 8-18 23-19 23-21 26-22 27-23 20-25 19-27 12-28 25-29 26-30 33-32 34-33 34-34 31-37 28-38 31-39 36-40 26-41 57-42 41-44 42-45 41-46 43-47 39-48 44-49 47-51 49-52 49-53 47-56 52-57 54-58 47-59 52-60 56-63 59-64 59-65 52-66 59-67 +0-0 1-1 3-2 3-3 3-4 4-5 6-6 6-7 7-8 +0-0 1-1 4-2 3-3 3-4 13-5 6-6 11-7 12-8 7-9 8-10 14-11 15-12 13-13 14-14 5-15 6-16 13-17 14-18 15-19 16-20 18-21 21-22 21-23 20-24 22-25 22-26 25-27 26-28 27-29 20-30 28-31 28-32 +0-0 2-1 3-3 3-4 4-5 5-6 6-7 7-8 8-9 8-10 10-11 12-12 9-13 13-14 14-15 14-16 17-17 18-18 17-19 21-20 20-21 19-22 23-23 +3-0 1-1 1-2 2-3 5-4 4-5 5-6 9-7 11-8 10-9 13-10 16-11 13-12 27-13 6-14 29-16 29-17 23-18 22-19 25-20 27-21 23-22 24-23 28-24 28-25 32-26 31-27 32-28 33-29 41-30 41-31 40-32 40-33 38-34 43-35 40-36 39-38 44-39 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 12-7 9-8 8-9 13-10 12-11 14-12 16-13 18-14 13-15 20-16 21-17 22-18 21-19 20-20 21-22 21-23 28-24 21-25 30-26 30-27 31-28 32-29 34-30 34-31 35-32 37-33 38-34 38-35 40-36 35-37 39-38 40-39 42-40 +0-0 1-1 2-2 3-3 4-4 8-5 7-6 9-7 7-8 11-9 12-10 11-11 13-12 14-13 14-14 18-15 19-16 16-17 18-18 20-19 21-20 18-21 25-22 24-24 23-25 27-26 29-27 29-28 +0-0 2-1 3-2 1-3 4-4 4-5 10-6 10-7 12-8 
5-9 13-10 16-11 14-12 15-13 21-14 19-15 14-16 21-17 19-18 23-19 19-20 20-21 25-22 26-23 20-24 22-25 22-26 31-27 32-30 34-31 30-32 30-33 36-34 32-35 33-36 52-37 34-38 40-40 41-42 42-43 44-44 45-45 43-46 47-48 46-49 47-50 50-51 50-52 52-54 +0-0 8-1 4-3 3-4 4-5 4-6 5-7 7-8 8-9 9-10 8-11 11-12 12-13 13-14 13-15 14-16 17-17 18-18 16-19 18-20 17-21 20-22 17-23 22-24 31-25 24-26 24-27 25-28 27-29 31-30 31-31 33-32 31-33 34-34 36-35 34-36 36-37 28-38 29-39 39-40 44-41 27-42 44-43 44-44 44-45 46-46 47-47 48-48 49-49 50-50 49-51 49-52 41-54 53-55 +1-0 3-1 3-2 3-3 3-4 6-5 4-6 11-7 7-8 10-9 11-11 15-12 11-13 12-14 8-15 14-16 15-17 17-18 17-19 18-20 19-21 20-22 21-23 23-24 25-25 26-26 24-27 27-28 27-29 30-31 32-32 28-33 32-35 33-36 34-37 35-38 37-39 39-40 40-41 41-42 41-43 43-44 51-45 42-46 46-47 44-48 48-49 49-50 50-51 51-52 +0-0 2-1 4-2 4-3 3-4 5-6 10-7 11-8 7-9 8-10 9-11 6-12 15-14 19-15 6-16 16-17 18-18 18-19 22-20 20-21 16-22 25-23 29-24 29-25 28-26 29-27 30-28 34-29 34-30 35-31 +0-0 1-1 1-2 2-3 4-4 4-5 4-6 7-9 7-10 12-11 9-12 14-13 15-14 12-15 17-16 15-17 13-18 17-19 17-20 10-21 19-22 19-24 20-25 21-26 22-27 23-28 24-29 +0-0 1-1 6-2 3-3 6-4 4-5 5-6 6-7 7-8 12-9 8-10 10-11 11-12 8-13 9-14 12-15 13-16 17-18 16-19 17-20 17-21 17-22 32-23 21-24 23-25 25-26 27-27 17-28 28-29 28-30 27-31 30-32 33-33 33-34 29-35 32-36 33-37 36-38 36-39 37-40 20-41 37-42 40-43 42-44 43-45 44-46 41-47 46-48 46-49 +0-0 1-1 2-2 4-3 5-4 7-5 16-6 11-7 14-8 13-9 14-10 18-12 19-13 +0-0 0-1 2-2 3-3 4-4 4-5 6-6 6-7 8-8 8-9 +0-0 2-1 2-2 5-5 7-6 6-7 6-8 5-9 6-10 10-11 15-14 13-15 13-16 18-17 19-18 20-19 21-20 17-21 21-22 22-23 +0-0 2-1 1-2 2-3 4-4 6-5 2-6 6-7 5-8 8-9 9-10 10-11 9-12 12-14 17-15 13-16 16-18 11-19 16-20 13-21 19-22 19-23 21-24 22-25 20-26 21-27 24-28 +0-0 2-1 4-2 3-3 5-4 7-6 6-7 3-8 7-9 11-10 12-11 13-12 12-13 15-14 13-15 17-16 18-17 17-18 22-19 22-20 24-21 24-22 25-23 27-24 28-25 29-26 36-27 33-28 33-29 33-30 36-33 36-34 36-35 36-36 37-37 39-38 40-39 36-40 37-41 42-42 43-43 44-44 45-45 46-46 45-47 44-48 47-49 +0-0 1-1 2-2 5-3 4-4 5-5 8-6 10-7 8-8 9-9 9-10 9-11 13-12 16-13 17-14 15-15 16-16 19-17 18-18 21-19 20-20 22-21 +0-0 1-1 1-2 3-3 4-4 5-5 7-6 9-7 10-8 7-9 7-10 11-11 11-12 12-13 13-14 +0-0 1-1 2-2 4-3 4-4 6-5 6-6 13-7 13-8 7-9 13-10 13-11 16-13 17-14 +0-0 1-1 3-2 3-3 5-4 6-5 11-6 12-7 13-8 19-9 15-10 16-11 17-12 19-13 19-14 21-15 21-16 23-17 24-18 25-19 26-20 29-21 32-22 31-23 32-24 33-25 29-26 35-27 29-28 35-29 39-30 +0-0 1-1 3-2 3-3 4-4 5-5 7-6 8-7 7-8 7-9 8-10 10-11 11-12 12-13 12-14 16-15 14-16 13-17 17-18 18-19 18-20 19-21 +0-0 1-1 2-2 4-3 6-4 7-5 8-6 9-7 11-8 10-9 10-10 15-11 13-12 14-13 16-14 18-15 23-16 23-17 24-18 +0-0 1-1 3-2 4-3 7-4 6-6 8-7 10-8 14-11 13-12 12-13 13-14 17-15 19-17 22-18 22-20 24-21 25-22 26-23 27-24 28-25 29-26 +0-0 2-2 4-3 5-4 6-5 0-6 4-7 4-8 11-9 12-10 6-11 13-12 11-13 8-14 16-15 16-16 15-17 16-19 17-20 21-21 21-24 23-25 26-26 25-27 24-28 24-29 25-30 15-32 29-33 29-34 36-36 34-37 32-38 36-39 34-40 33-41 33-42 38-43 +0-0 1-1 2-2 3-3 3-4 3-5 6-6 6-7 5-8 7-9 8-10 +0-0 1-1 5-2 6-3 4-4 5-5 4-6 6-7 8-8 9-9 10-10 12-11 13-12 12-13 11-14 14-15 +0-0 1-1 2-2 3-3 5-4 6-5 5-6 7-7 +0-0 1-1 1-2 2-3 3-4 6-5 7-6 5-7 8-8 +0-0 4-1 9-2 11-4 3-5 2-6 5-7 6-8 7-9 8-10 10-11 11-12 12-13 9-14 53-15 15-16 12-17 16-18 22-19 18-20 12-21 22-22 23-23 22-24 23-25 36-26 25-27 26-28 27-29 28-30 29-31 36-32 42-33 35-34 36-35 35-36 39-37 39-38 39-39 42-40 43-41 44-42 48-43 39-44 47-45 41-46 49-47 50-50 51-51 52-52 53-53 54-54 51-55 55-56 56-57 57-58 +0-0 1-1 4-2 2-3 3-4 7-5 6-6 8-7 8-8 10-9 11-10 12-11 13-12 15-14 
16-15 17-16 21-17 18-18 19-19 23-20 22-21 +0-0 0-1 2-2 3-3 0-4 4-5 7-6 7-7 5-8 9-9 8-10 10-11 11-12 14-14 15-15 16-16 15-17 18-18 17-19 16-20 19-21 16-22 20-23 22-24 20-25 21-26 22-27 23-28 24-29 +0-0 1-1 2-2 2-3 5-4 5-5 6-6 5-7 8-8 7-9 10-10 10-11 10-12 12-13 13-14 14-15 16-16 17-17 +0-0 2-1 3-2 4-3 4-4 5-5 5-6 6-7 8-8 10-9 9-10 12-11 13-12 15-13 12-14 17-15 17-16 16-17 18-18 18-19 +0-0 0-1 6-2 6-3 11-4 5-5 3-6 9-7 10-8 8-9 12-10 13-11 14-12 12-13 19-14 14-15 16-16 17-17 18-18 19-19 19-20 20-21 22-22 24-23 25-24 26-25 27-26 24-27 28-28 29-29 30-30 31-31 32-32 +0-0 1-1 2-2 4-3 4-5 7-6 6-7 8-8 8-9 11-10 12-11 13-12 14-13 15-14 18-15 17-16 19-17 26-18 21-19 11-21 25-22 25-23 23-24 23-26 27-27 29-28 33-29 32-30 35-31 34-32 36-33 +0-0 1-1 2-2 6-3 4-4 20-5 5-6 9-7 10-8 11-9 13-10 11-11 11-12 14-13 14-14 15-15 16-16 17-17 18-18 20-19 19-20 21-21 +0-0 0-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 13-11 14-12 12-13 11-14 15-15 17-16 16-17 16-18 19-19 21-20 21-21 22-22 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 10-9 11-10 9-11 9-12 10-13 13-14 15-15 14-16 16-17 18-18 14-19 19-21 23-23 +0-0 0-1 9-3 13-4 3-5 4-6 8-7 12-8 10-9 16-10 17-11 17-12 19-14 19-15 21-16 22-17 22-18 26-19 20-20 25-21 +0-0 1-1 3-2 4-3 7-4 5-5 7-6 8-7 9-8 10-9 11-10 12-11 13-12 7-13 12-14 15-17 18-18 18-19 20-20 19-21 19-22 18-23 22-24 23-25 25-26 25-27 25-28 26-29 27-30 27-31 28-32 33-33 32-34 30-35 31-36 30-37 31-38 33-39 35-40 +0-0 1-1 4-2 5-3 7-4 8-5 6-6 7-7 9-8 9-9 9-10 11-11 12-12 14-13 12-14 12-15 14-16 17-17 19-18 22-19 21-20 20-21 22-22 23-23 +0-0 1-1 2-2 3-3 4-4 8-5 9-6 6-7 7-8 11-9 10-10 12-12 13-13 14-14 17-15 16-16 18-17 18-18 19-19 +0-0 1-1 2-2 3-3 6-4 3-5 5-6 9-7 10-8 9-9 8-10 12-11 10-12 13-13 11-14 15-15 13-16 16-17 16-18 17-19 18-20 19-21 14-22 20-23 21-24 22-25 23-26 23-27 24-28 28-29 27-30 26-31 29-33 28-34 32-35 32-36 25-37 33-38 +0-1 2-2 3-3 4-4 6-5 8-6 10-7 10-8 12-9 13-10 11-11 12-12 15-13 24-14 24-15 15-16 17-18 20-22 26-23 26-24 27-25 +0-0 1-1 0-2 1-3 2-4 3-5 3-6 3-7 4-8 5-9 5-10 5-11 10-12 6-13 1-14 9-15 9-16 9-17 8-18 11-19 13-20 13-21 12-22 13-23 8-24 14-25 14-26 15-27 +0-0 1-1 3-2 2-3 6-4 8-5 9-6 8-7 5-8 12-9 12-10 15-11 16-12 20-13 17-15 21-16 +0-0 1-1 2-2 2-3 4-4 3-5 8-6 8-7 9-8 9-9 11-10 16-11 13-12 10-13 14-14 19-15 19-16 19-17 20-18 21-19 22-20 +0-0 2-1 3-3 5-4 5-5 7-6 8-7 9-8 14-9 15-10 14-11 18-12 17-13 17-14 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 7-7 9-8 8-9 10-10 11-11 9-12 12-13 13-14 14-15 16-16 17-17 18-18 19-19 21-21 22-22 20-23 23-24 24-25 27-26 26-27 23-28 27-29 27-30 31-31 25-32 31-33 32-35 +0-0 1-1 2-2 3-4 4-5 6-6 8-7 10-8 11-9 12-10 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 6-7 7-8 8-9 9-10 14-11 10-12 13-13 13-14 14-15 17-16 15-17 18-18 18-19 19-20 20-21 21-22 23-23 24-24 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 6-7 6-8 8-9 10-10 11-11 12-12 13-13 14-14 15-15 +0-0 1-1 3-2 2-3 3-4 3-5 6-6 3-7 11-8 10-10 11-11 14-13 10-14 10-15 6-16 51-17 26-18 16-19 21-25 23-26 14-27 24-28 25-29 23-30 29-33 31-35 36-37 37-38 43-44 44-45 45-46 45-47 49-50 46-51 48-52 48-53 49-54 45-55 52-57 49-59 57-61 58-62 57-63 59-64 +0-0 2-2 10-3 3-4 12-5 4-6 4-7 8-8 9-9 8-10 7-11 8-12 9-13 11-14 13-15 15-16 16-17 16-18 17-19 19-20 18-21 21-22 22-24 23-25 +0-0 2-1 3-2 1-3 5-5 8-6 7-7 7-8 9-9 14-10 11-11 11-12 13-13 14-14 14-15 16-16 16-17 20-18 14-19 20-20 21-21 22-22 24-23 24-24 24-25 18-26 24-27 36-28 28-29 28-30 28-31 28-32 35-33 34-35 35-36 37-38 +0-0 1-1 1-2 4-3 4-4 6-5 6-6 9-7 11-8 10-9 12-10 12-11 14-12 15-13 16-14 17-15 19-16 19-17 21-18 +0-0 1-1 2-2 4-3 4-4 3-5 8-6 6-7 7-8 9-9 9-10 3-11 11-12 10-14 12-15 14-16 12-17 18-19 17-20 16-21 18-22 19-23 
20-24 21-25 22-26 23-27 24-28 25-29 26-30 27-31 28-32 29-33 31-34 30-35 +0-0 2-1 4-2 2-3 4-4 3-5 4-6 6-7 9-8 8-9 9-10 11-11 11-12 13-13 14-14 15-15 16-16 17-17 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 2-8 8-9 9-10 10-11 11-12 12-13 13-14 14-15 18-16 17-17 18-18 19-19 20-20 21-21 20-22 23-23 26-24 26-26 28-27 29-28 31-29 35-30 31-31 34-32 31-33 32-35 38-36 39-37 40-38 42-39 42-40 42-41 44-42 44-43 46-44 47-45 +0-0 1-1 2-2 3-3 5-4 6-5 6-6 10-7 8-8 9-9 10-10 11-11 +0-0 1-1 3-2 3-3 4-4 5-5 6-6 7-7 9-8 11-10 10-11 12-13 11-14 14-15 15-16 18-17 18-18 19-19 21-20 20-21 22-22 23-23 24-24 25-25 26-26 28-28 29-29 +0-0 2-1 3-2 7-3 8-4 9-5 5-6 10-7 6-8 8-9 12-10 13-11 12-12 14-13 14-14 16-15 +0-0 1-1 6-2 3-3 8-4 9-5 6-6 7-7 9-8 12-9 14-10 13-11 15-12 15-13 15-14 18-15 17-16 19-17 20-18 +0-0 1-1 2-2 3-3 4-4 5-5 +0-0 1-1 2-2 3-3 4-4 5-5 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 +0-0 0-1 1-2 4-3 3-4 5-5 5-6 9-7 8-8 9-9 11-10 21-11 12-12 15-14 18-16 19-17 16-18 19-19 21-20 +0-0 5-1 4-2 11-3 2-4 3-5 3-6 5-7 5-8 4-9 5-10 5-11 6-12 8-13 10-15 11-16 12-17 13-18 12-19 16-20 17-21 20-23 21-24 21-27 21-28 26-29 22-30 23-31 27-32 27-33 27-34 27-35 29-36 33-37 33-38 33-39 33-40 34-41 38-43 38-44 38-45 38-46 38-47 37-48 42-49 42-50 41-51 +0-0 2-1 3-2 4-3 18-4 6-5 7-6 8-7 9-8 11-9 12-10 3-11 14-12 14-13 17-14 18-15 19-16 19-17 20-18 20-19 25-20 23-21 25-22 21-23 26-24 29-25 29-26 +0-0 1-1 2-2 3-3 3-4 4-5 7-6 14-7 15-9 8-10 12-11 8-12 11-14 12-15 13-16 15-17 18-18 17-19 17-20 18-21 20-22 21-24 23-25 25-26 26-27 23-28 28-29 29-30 23-31 28-32 30-33 30-34 36-35 35-36 35-37 33-38 32-39 31-40 37-41 +0-0 1-1 2-2 2-3 3-4 4-5 4-6 6-7 9-8 11-9 8-10 10-11 9-12 12-13 14-14 14-15 15-16 15-17 14-18 16-19 18-20 18-22 19-23 19-24 21-25 +0-0 2-1 4-2 4-3 5-4 7-5 6-6 9-7 8-8 10-9 11-10 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 +0-0 0-1 3-2 7-3 4-4 5-5 8-6 8-7 10-8 9-9 11-10 12-11 14-12 15-13 16-14 17-15 19-16 18-17 21-18 22-19 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 8-7 9-8 10-9 13-10 12-11 14-12 16-13 16-15 15-16 19-17 18-18 20-19 +1-0 1-1 2-2 4-3 6-4 +0-0 2-1 2-2 7-3 2-4 9-5 11-6 11-7 11-8 11-9 15-10 17-11 17-12 18-13 18-14 23-15 24-16 24-17 25-18 +0-0 1-1 2-2 3-3 4-4 5-5 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 7-7 9-8 10-9 11-10 12-11 13-12 13-13 15-14 16-15 16-16 18-17 18-18 19-19 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 +0-0 3-1 1-2 2-3 4-4 6-5 4-6 5-7 8-8 7-9 8-10 12-11 10-12 11-13 15-14 16-15 15-16 17-17 +0-0 1-1 2-2 3-3 3-4 6-5 6-6 7-7 8-8 9-9 +0-0 1-1 2-2 3-3 2-4 5-5 7-6 8-7 5-8 10-9 9-10 11-11 12-12 13-13 14-14 15-15 15-16 16-17 17-18 18-19 19-20 20-21 +0-0 1-1 1-2 4-3 4-4 6-5 7-6 5-7 7-8 7-9 10-10 12-11 12-13 15-14 15-15 14-16 16-17 17-18 19-19 19-20 19-21 20-22 21-23 24-24 24-25 22-26 25-27 26-28 +0-0 1-1 2-2 5-3 3-4 7-5 12-6 11-7 10-8 13-9 14-10 16-11 17-12 18-13 19-14 +0-0 1-1 1-2 2-3 3-4 3-5 4-6 4-7 5-8 6-9 6-10 7-11 +0-0 1-1 3-2 4-3 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 3-7 7-8 8-9 9-10 10-11 11-12 +0-0 1-1 2-2 3-3 4-4 5-5 7-7 12-8 12-9 14-10 15-11 13-12 17-13 18-14 19-15 21-16 21-17 22-18 23-19 24-20 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 8-7 8-8 9-9 +0-0 1-1 2-2 5-3 7-4 6-5 7-6 10-7 10-8 11-9 +0-0 1-1 2-2 5-3 2-4 4-5 5-6 4-7 7-8 8-9 10-10 10-11 11-12 12-13 12-14 13-15 +0-0 1-1 1-2 2-3 4-4 8-5 7-6 9-7 10-8 10-9 11-10 12-11 13-12 15-13 16-14 18-15 19-16 20-17 17-18 21-19 23-20 25-21 26-22 27-23 22-24 27-25 +0-0 1-1 3-2 2-3 5-4 6-5 7-6 5-7 8-8 5-9 13-10 13-11 16-12 17-13 18-14 19-15 19-16 21-17 20-18 22-19 23-20 24-21 +0-0 1-1 2-2 4-3 4-4 6-5 7-6 8-7 9-8 10-9 12-10 11-11 17-12 16-14 20-15 18-16 22-17 18-18 18-19 23-20 19-21 23-22 15-23 13-24 25-25 +1-0 0-1 2-3 3-4 
3-5 4-6 5-7 6-8 9-9 9-10 8-11 10-12 11-13 13-14 13-15 14-16 16-17 16-18 16-19 18-20 17-21 20-22 18-23 21-24 +0-0 2-1 1-2 7-3 8-4 9-5 9-7 13-8 12-9 13-10 14-11 17-12 18-13 +0-0 1-1 2-2 3-3 5-4 4-5 8-6 5-7 10-8 7-9 11-11 16-12 9-13 13-14 12-15 14-16 15-17 17-18 16-19 18-20 18-21 19-22 20-23 21-24 22-25 21-26 25-27 26-28 27-29 28-30 29-32 29-34 30-35 +0-0 1-1 4-2 5-3 6-4 7-5 8-6 7-7 9-8 10-9 10-10 11-11 12-12 14-13 15-14 17-15 16-16 16-17 18-18 20-20 +0-0 1-1 2-2 2-3 3-4 3-5 2-6 6-7 7-8 8-9 9-10 10-11 10-12 9-13 13-14 12-15 16-16 15-17 15-18 17-19 18-20 20-21 20-22 22-23 23-24 21-25 20-26 24-27 26-28 22-29 27-30 27-31 28-32 28-33 31-34 32-35 32-36 31-37 33-38 +0-0 1-1 2-2 4-3 4-4 6-5 5-6 7-7 8-8 8-9 10-10 10-11 11-12 13-13 12-14 14-15 15-16 16-17 16-18 17-19 19-21 20-22 19-23 22-24 23-25 33-26 34-27 28-28 28-29 33-30 28-31 33-32 28-34 32-35 32-36 30-38 36-39 37-40 +0-0 1-1 2-2 4-3 5-4 6-5 6-6 7-7 9-8 8-9 12-10 13-11 14-12 15-13 16-15 15-16 19-17 17-18 20-19 21-20 22-21 +0-0 3-1 3-2 2-3 4-4 4-5 2-6 7-7 6-8 11-9 11-10 13-11 11-12 13-13 14-14 15-15 +0-0 4-3 2-4 8-5 9-6 10-7 5-8 11-9 13-10 15-12 15-13 17-14 12-15 18-16 3-17 13-18 21-19 16-20 16-21 28-22 25-24 18-25 27-27 19-28 24-29 28-31 26-32 7-33 28-34 29-35 +0-0 1-1 3-2 4-3 5-4 6-5 5-6 8-7 8-8 8-9 3-10 14-11 3-12 15-13 7-14 18-15 20-17 23-18 18-19 23-21 22-22 24-23 26-24 30-25 28-26 29-27 25-28 31-29 33-30 32-31 35-32 36-33 32-34 37-35 +0-0 0-1 2-2 3-3 6-4 2-5 6-6 4-7 5-8 10-9 7-10 12-11 10-12 14-13 16-14 14-15 18-16 19-17 +0-0 0-1 2-2 3-3 4-4 5-5 6-6 5-7 8-8 5-9 8-10 10-11 9-12 11-13 12-14 13-15 14-16 15-17 16-18 17-19 16-20 18-21 17-22 19-23 22-24 20-25 21-26 23-27 24-28 25-29 +0-0 1-1 1-2 2-3 3-4 3-5 5-6 6-7 6-8 8-9 8-10 +1-0 3-1 1-2 4-3 3-4 4-5 5-6 8-7 8-8 8-9 9-10 10-11 11-12 12-13 19-14 15-15 14-17 23-18 18-19 22-20 21-21 21-22 24-23 24-24 +0-0 0-1 1-2 3-3 3-4 4-5 6-6 3-7 8-8 7-9 9-10 10-11 11-12 11-13 13-14 13-15 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 11-9 10-10 11-11 13-12 14-13 13-14 15-15 16-16 18-17 19-18 17-19 20-20 16-21 22-22 +0-0 1-1 1-2 2-3 3-4 5-5 6-6 6-7 18-8 10-9 11-10 13-12 14-13 19-14 20-15 21-16 16-17 26-18 19-19 26-20 27-21 28-22 9-23 23-24 29-25 30-26 27-27 27-28 27-29 29-32 39-33 33-34 35-35 35-36 38-37 39-38 39-39 40-40 41-41 +0-0 1-1 6-2 2-3 2-4 3-5 6-6 7-7 11-8 8-9 7-10 8-11 9-12 12-13 12-14 13-15 +0-0 2-1 2-2 3-3 6-4 7-5 6-6 9-7 8-8 10-9 +0-0 1-1 3-2 4-3 5-4 5-5 7-6 8-7 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 6-7 7-8 +1-0 2-1 3-2 4-3 6-4 5-5 8-6 7-7 9-8 +0-0 1-1 2-2 3-3 3-4 5-5 6-6 7-7 8-8 8-9 9-10 10-11 +0-0 1-1 3-2 2-3 14-4 2-5 4-6 6-7 10-10 9-12 9-13 13-14 14-15 15-16 17-17 17-18 17-20 19-21 20-22 23-23 22-24 24-25 23-26 25-27 +0-0 1-1 2-2 3-3 4-4 4-5 6-6 7-8 8-9 9-10 10-11 11-12 12-13 3-17 19-18 23-21 31-22 25-23 22-24 23-25 31-26 31-27 31-28 29-29 36-32 34-33 38-34 39-35 40-36 35-37 43-38 41-39 44-40 45-42 46-43 48-44 49-45 50-46 51-47 52-48 53-49 53-50 55-51 53-52 42-54 56-55 60-57 56-58 57-59 61-60 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 6-7 8-9 8-10 9-12 11-13 12-14 13-15 14-16 15-17 15-18 17-19 18-20 21-21 17-22 18-23 19-24 21-25 23-26 23-27 +1-0 2-1 3-2 6-3 5-4 6-5 7-6 19-7 7-8 8-9 9-10 9-11 19-12 11-13 13-14 14-15 15-16 16-17 18-18 19-19 16-20 20-22 24-23 22-24 25-25 27-26 24-27 23-28 34-29 29-30 30-31 31-32 32-33 33-34 37-35 35-37 39-38 37-39 39-40 40-41 36-42 42-43 47-44 43-45 44-46 44-47 41-48 37-49 50-50 51-51 53-53 51-54 49-55 51-56 55-57 49-58 51-59 55-60 57-61 60-62 55-64 63-65 64-66 65-67 59-68 67-69 55-70 65-71 55-72 68-74 59-75 +0-0 1-1 1-2 2-3 4-4 6-5 10-6 10-7 11-8 11-9 12-10 13-11 14-12 14-13 17-14 +0-0 1-1 2-2 3-3 
5-4 5-5 8-6 7-7 10-8 9-9 11-10 +0-0 1-1 2-2 3-3 4-4 4-5 5-6 5-7 7-8 8-9 9-10 6-11 10-12 12-13 13-14 15-15 14-16 10-17 15-18 16-19 17-20 19-21 20-22 21-23 +0-0 2-1 1-2 3-3 3-4 7-5 4-6 5-7 12-8 10-9 14-10 15-11 13-12 8-13 16-14 17-15 18-16 18-17 20-18 22-19 24-20 25-21 21-22 26-23 +0-0 1-1 4-2 5-4 7-5 5-6 14-7 13-8 8-9 8-10 15-11 19-12 19-13 25-14 24-15 19-16 30-17 24-19 29-21 29-24 31-25 34-26 37-27 34-29 39-31 +0-0 2-1 2-2 4-3 3-4 4-5 7-6 8-7 9-8 10-9 13-10 14-11 15-12 16-13 17-14 19-15 20-16 21-17 21-18 21-19 18-20 22-21 25-22 +0-0 2-1 5-2 2-3 3-4 5-5 6-6 6-7 7-8 7-9 8-10 9-11 8-12 14-14 14-16 16-17 14-18 15-19 16-20 17-21 18-22 21-23 22-24 23-25 24-26 25-27 26-28 27-29 26-30 19-31 26-33 26-34 29-36 29-37 30-38 31-39 29-40 33-42 +0-0 1-1 2-2 4-3 3-4 3-5 1-6 4-7 5-9 6-10 7-11 10-12 6-13 9-14 10-15 12-16 11-17 13-18 15-19 17-20 16-21 15-22 17-23 15-24 19-26 19-27 21-28 21-29 21-30 22-31 23-32 +0-0 1-1 2-2 3-3 5-4 5-5 10-6 7-7 9-8 7-9 12-10 11-11 14-12 13-13 14-14 16-15 +0-0 3-1 3-2 4-3 6-4 5-5 8-6 9-7 10-8 +0-0 0-1 1-2 2-3 3-4 4-5 6-6 6-7 6-8 7-9 +1-0 1-1 3-2 3-3 2-4 5-6 8-7 7-8 7-9 2-10 10-11 10-12 13-13 11-14 12-15 16-16 14-17 16-18 17-19 18-20 20-21 19-22 19-23 21-24 23-25 24-26 +2-0 2-2 1-3 1-4 5-5 4-6 11-7 6-8 7-9 9-10 12-11 13-12 14-13 15-14 16-15 17-16 18-17 18-18 20-19 21-20 22-21 23-22 23-23 25-24 26-25 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 7-7 9-8 12-9 13-10 14-11 15-12 16-13 17-14 19-15 20-16 +0-0 0-1 4-3 2-4 6-5 6-6 6-7 5-8 7-9 10-10 10-11 12-12 11-13 13-14 +1-0 0-1 1-2 4-3 5-4 6-5 2-6 6-7 9-8 7-9 8-10 12-11 14-12 13-13 10-14 16-15 17-16 +0-0 2-1 1-2 3-3 3-4 6-5 5-6 6-7 7-8 10-9 10-10 12-11 13-12 14-13 15-14 16-15 17-16 16-17 20-18 21-19 20-20 23-21 24-22 25-23 26-24 19-25 27-26 +0-0 0-1 3-2 2-3 3-4 5-5 5-6 7-7 7-8 10-9 11-10 12-11 13-12 14-13 14-14 15-15 17-16 17-17 15-18 17-19 17-20 18-21 22-22 22-23 20-24 22-25 30-29 30-31 29-32 32-33 33-34 33-35 34-36 32-37 35-39 36-40 41-41 47-42 40-43 36-44 43-45 46-46 46-47 47-48 54-49 50-50 51-51 51-52 53-53 53-54 54-55 55-56 56-57 58-58 +0-0 1-1 2-2 5-3 6-4 4-5 7-6 8-7 9-8 10-9 8-10 9-11 10-12 8-13 12-14 3-15 11-16 17-17 18-18 13-19 20-20 16-21 16-22 17-23 28-25 14-26 21-27 24-28 25-29 25-30 26-31 26-32 28-33 26-34 32-35 34-36 30-37 30-38 29-39 33-40 35-41 35-42 +0-0 2-1 4-2 2-3 2-4 8-5 9-6 7-7 9-8 13-9 14-10 10-11 10-12 10-13 12-14 7-15 5-16 16-17 15-18 18-19 18-20 19-21 21-22 21-23 22-24 +1-0 3-1 9-2 3-3 4-4 4-5 4-6 6-7 6-8 5-9 11-10 9-11 12-12 13-13 12-14 19-15 14-16 16-17 16-18 15-19 17-20 18-21 20-22 20-23 21-24 22-25 +0-0 2-1 5-2 3-3 2-4 5-5 5-6 4-7 6-8 7-9 10-10 11-11 12-12 10-13 9-14 13-15 14-16 18-19 20-20 21-21 19-22 24-23 22-24 26-25 26-27 26-28 29-30 30-31 30-32 +0-0 1-1 2-2 4-3 4-4 5-5 3-6 6-8 16-9 7-10 11-12 12-13 13-14 14-15 15-16 16-17 19-18 18-19 19-20 21-21 21-22 24-23 25-24 25-25 26-26 29-28 28-30 33-31 34-32 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 8-7 9-8 10-9 11-10 12-11 14-12 15-13 16-14 17-15 18-16 +0-0 1-1 3-2 3-3 5-4 7-5 8-6 9-7 7-8 9-9 13-10 14-11 15-12 16-13 17-14 17-15 19-16 20-17 +2-0 3-1 1-2 4-3 4-4 4-5 6-6 5-7 7-8 6-9 16-10 18-11 8-12 13-13 11-14 12-15 14-16 15-17 20-18 22-19 20-20 22-21 21-22 24-23 25-24 27-25 28-26 27-27 28-28 29-29 +0-0 1-1 2-2 4-3 6-4 5-5 4-6 7-7 8-8 9-9 9-10 12-11 13-12 14-13 15-14 16-15 17-16 17-17 18-18 19-19 21-20 22-21 23-22 20-23 24-24 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 20-8 8-9 11-11 15-12 14-13 19-16 12-17 20-18 19-19 20-20 26-21 25-22 22-23 23-24 27-26 13-27 30-28 26-30 27-31 24-33 36-34 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 8-7 10-8 11-9 8-10 12-11 13-12 15-13 16-14 16-15 17-16 +0-0 1-1 2-2 1-3 4-4 2-6 6-7 
4-8 5-9 5-10 8-11 7-12 9-13 8-14 11-15 13-16 13-17 12-18 14-19 +0-0 1-1 2-2 2-3 6-4 5-5 9-7 17-8 11-9 12-10 12-11 14-12 14-13 16-14 17-15 18-16 +0-0 1-1 2-2 3-3 4-4 2-5 6-6 21-7 9-8 11-9 16-10 11-11 14-12 11-13 15-14 17-15 10-16 15-17 20-19 17-20 18-21 24-22 24-23 23-24 24-25 25-26 26-27 27-28 28-29 29-30 30-31 31-32 32-33 33-34 34-35 36-36 +0-0 1-1 2-2 3-3 5-4 7-5 7-6 9-7 9-8 10-9 14-10 15-11 15-12 17-13 19-14 21-15 20-16 17-17 23-18 +0-0 1-1 2-2 4-3 3-4 5-5 7-6 8-7 9-8 6-9 11-10 13-11 15-12 15-13 16-14 +0-0 1-1 5-2 2-3 3-4 7-5 8-6 6-7 8-8 11-9 11-10 14-12 13-13 14-14 16-15 14-16 20-18 21-19 22-20 23-21 21-22 25-23 26-24 25-25 28-26 26-27 29-28 30-29 31-30 33-31 33-32 34-33 +0-0 1-1 4-2 3-3 2-4 7-5 5-6 7-8 11-9 10-10 9-11 15-12 16-13 17-14 +0-0 2-1 3-2 4-3 5-4 1-5 1-6 5-7 7-8 8-9 9-10 10-11 10-12 9-13 12-14 10-15 14-16 15-17 17-19 17-20 16-21 18-23 16-24 19-25 +0-0 1-1 4-2 3-3 5-4 7-5 9-6 10-7 +0-0 1-1 1-2 2-3 3-4 3-5 5-6 5-7 6-8 9-9 8-10 10-11 12-12 12-13 13-14 10-15 12-16 12-17 13-18 16-19 15-21 16-22 17-24 18-25 +0-0 1-1 1-2 3-3 4-4 5-5 6-6 8-7 9-8 10-9 11-10 12-11 12-12 14-13 15-14 16-15 15-16 12-17 16-18 12-19 18-20 19-21 20-22 +0-0 1-1 3-3 2-4 4-5 3-6 4-7 4-8 5-9 6-10 8-11 7-12 9-13 10-14 11-15 +0-0 1-1 2-2 4-3 6-4 7-5 8-6 8-7 16-8 9-9 10-10 11-11 12-12 11-13 12-14 15-15 19-17 20-18 22-19 23-20 24-21 25-22 26-23 26-24 27-25 +0-0 1-1 2-2 5-3 6-4 7-5 8-6 8-7 7-8 9-9 9-10 10-11 11-14 12-15 14-16 16-17 16-18 15-19 17-20 +0-0 1-1 2-2 3-3 4-4 3-5 5-6 5-7 5-8 5-9 7-10 8-11 +0-0 1-1 2-2 3-3 4-4 6-5 8-6 9-7 9-8 +0-0 1-1 3-2 4-3 5-4 6-5 8-6 9-7 10-8 2-9 11-10 15-11 12-12 12-13 14-14 16-15 18-19 21-23 23-24 23-25 24-26 +0-0 1-1 1-2 3-3 4-4 5-5 3-6 5-7 5-8 11-9 8-10 9-11 11-12 12-13 11-14 14-15 13-16 7-17 15-19 13-20 19-21 17-22 18-23 20-24 20-26 22-27 23-28 24-29 26-30 25-31 25-32 27-33 +1-1 5-2 5-3 3-4 2-5 6-6 9-7 7-8 12-9 11-10 10-11 11-12 12-13 11-14 14-15 22-16 17-17 20-18 10-19 23-20 24-21 25-22 23-23 24-24 28-25 29-27 31-28 32-29 33-30 35-31 33-32 33-33 36-34 38-35 38-36 37-37 41-38 42-39 43-41 43-42 39-43 45-44 +0-0 3-1 4-2 4-3 4-4 7-5 6-6 11-7 12-8 13-9 14-10 9-11 15-12 +0-0 1-1 3-2 3-3 4-4 5-5 +0-0 0-1 0-2 1-3 3-4 3-5 4-6 2-7 4-8 6-9 6-10 7-11 +0-0 1-1 1-2 3-4 7-5 4-6 6-7 8-8 5-9 10-10 12-11 12-12 12-13 15-14 16-15 17-16 17-17 18-18 19-19 20-20 21-21 23-22 25-23 26-24 28-25 28-26 29-27 34-28 32-29 32-30 29-31 33-32 31-33 36-34 38-37 +0-0 2-1 4-2 4-3 6-4 7-5 8-6 9-7 10-8 11-9 10-10 13-11 14-12 15-13 17-14 14-15 17-16 19-17 20-18 24-20 20-21 24-22 26-23 25-24 25-25 4-26 29-27 27-28 31-30 33-31 32-32 34-34 35-36 37-37 40-38 41-39 41-40 +0-0 2-1 3-2 5-3 4-4 6-5 7-6 9-7 10-8 4-9 11-10 12-11 13-12 16-14 14-15 17-16 18-17 19-19 20-20 19-21 17-23 25-24 26-25 27-26 28-27 25-28 26-29 33-30 24-31 33-32 31-33 33-34 32-35 33-36 35-37 36-38 38-39 39-40 38-41 40-42 37-45 44-46 43-47 47-48 44-49 47-51 50-52 51-53 54-56 55-57 56-58 41-59 57-61 +0-0 4-1 10-2 6-3 7-4 8-5 9-6 5-8 11-9 5-10 14-11 14-12 11-14 17-15 18-16 15-17 15-18 20-19 21-20 22-21 23-22 24-23 23-24 19-25 25-26 27-27 25-29 25-30 25-31 28-32 29-34 29-36 32-38 +0-0 1-1 1-2 4-3 3-4 5-5 8-6 11-7 10-8 13-9 12-10 14-11 +0-0 1-1 3-2 4-3 8-6 8-7 13-8 11-9 14-10 17-11 15-12 17-13 21-14 19-15 20-16 21-17 23-18 24-19 25-20 +0-0 1-1 4-2 4-3 5-4 5-5 17-6 7-7 8-8 9-9 9-10 11-11 13-12 14-13 14-14 16-15 10-16 16-17 20-18 21-19 23-20 24-21 25-22 26-23 27-24 28-25 28-26 31-27 27-28 34-29 26-30 34-31 35-32 36-33 +0-0 2-1 3-2 4-3 5-4 6-5 8-7 8-8 9-9 12-11 14-12 13-13 15-14 16-15 16-16 18-17 19-18 20-19 21-20 21-21 +0-0 2-1 1-2 4-3 5-4 7-5 12-6 8-7 11-8 9-9 
12-10 14-11 12-12 13-13 18-14 15-15 20-16 15-17 15-18 18-19 24-20 21-21 23-22 26-23 26-24 26-25 27-26 29-27 30-28 32-29 33-30 +0-0 1-1 1-2 2-3 3-4 5-6 7-7 9-8 8-10 10-11 13-12 14-13 15-14 16-16 19-17 18-18 19-19 21-20 22-21 25-22 30-23 28-24 26-25 31-26 31-27 32-28 30-29 28-30 35-31 35-32 39-33 32-34 37-35 40-36 42-37 40-38 43-39 48-40 49-41 50-42 47-43 44-44 47-45 47-46 51-48 +0-0 1-1 2-2 8-3 4-4 5-5 6-6 7-7 6-8 9-9 9-10 9-11 12-12 10-14 14-16 13-17 15-19 15-21 18-22 19-23 20-24 21-25 22-26 22-27 24-28 17-29 24-30 25-31 +0-0 0-1 4-2 2-3 5-4 6-5 8-6 9-7 11-8 11-9 6-10 11-11 14-12 14-13 15-14 18-15 19-16 20-17 22-18 22-19 21-20 23-21 24-22 +0-0 1-1 2-2 2-3 6-4 8-5 8-6 9-7 6-8 11-9 14-10 12-11 15-12 15-13 17-14 16-15 18-16 19-17 20-18 +0-0 1-1 1-2 2-3 4-4 3-5 5-6 6-7 7-8 8-9 11-10 10-11 10-12 12-13 13-14 +0-0 1-1 1-2 1-3 3-4 3-5 6-6 7-7 7-8 7-9 9-10 9-11 12-12 11-13 10-14 12-15 13-16 14-17 +0-0 7-1 2-2 3-3 4-4 5-5 6-6 7-7 7-8 9-9 10-10 11-11 12-12 13-13 12-14 14-15 15-16 16-17 20-18 17-19 20-20 18-21 36-22 20-23 20-24 22-25 24-26 25-27 26-28 28-29 27-30 29-32 32-33 34-34 34-35 35-36 36-37 37-38 37-39 40-40 42-42 42-43 43-44 +0-0 1-1 2-3 2-4 5-5 3-6 6-7 7-8 7-9 10-10 11-11 14-12 15-13 17-14 16-15 20-16 21-17 19-18 22-19 25-22 26-23 29-24 29-25 30-26 +0-0 1-1 2-2 5-3 6-4 3-5 6-6 7-7 7-8 8-9 12-10 10-11 14-14 17-15 41-16 17-17 20-18 22-19 20-20 17-21 21-22 24-23 23-24 27-25 28-26 29-27 30-28 29-29 32-30 33-31 36-32 37-33 37-34 38-35 38-36 37-37 44-38 42-39 43-40 44-41 45-42 +0-0 1-1 1-2 4-3 5-4 3-5 6-6 8-7 8-8 6-9 7-10 11-11 11-12 12-13 13-14 14-15 17-16 16-17 17-18 18-19 +0-0 2-1 2-2 5-3 7-4 12-5 8-6 9-7 10-8 13-10 +0-0 3-1 2-2 1-3 5-4 9-6 7-7 8-8 12-9 10-10 16-11 13-12 9-13 14-14 21-16 12-17 25-18 26-19 19-23 23-24 20-25 31-26 30-29 32-32 35-34 41-35 36-36 38-38 39-39 40-40 44-41 44-42 44-44 44-45 45-46 46-47 49-48 51-50 57-51 56-52 48-53 54-54 49-55 48-56 47-57 55-58 +0-0 5-1 3-2 2-3 1-4 4-5 6-6 7-7 5-8 6-9 9-10 10-11 10-12 11-13 12-14 14-15 20-17 15-18 16-19 20-20 27-22 28-23 29-24 22-25 26-27 31-28 26-29 33-30 34-31 30-33 25-34 38-35 34-37 16-38 34-39 36-40 37-41 36-42 41-44 41-45 39-46 43-47 +0-0 1-1 1-2 3-3 2-4 5-5 8-6 4-7 7-8 9-9 10-10 6-11 11-12 8-13 8-14 11-15 14-16 13-17 14-18 16-19 15-20 17-21 +0-0 1-1 3-2 3-3 4-4 2-5 6-6 6-7 7-8 9-9 10-10 11-11 11-12 12-13 14-14 13-15 17-16 17-17 18-18 +0-0 1-1 4-2 3-3 4-4 4-5 7-6 8-7 19-8 12-9 12-10 14-11 16-12 15-14 15-15 16-16 17-17 22-18 16-19 41-20 18-21 25-22 24-23 26-24 27-25 28-26 26-27 30-28 29-29 32-30 34-31 30-32 30-33 29-34 44-35 30-36 41-37 39-38 33-39 38-40 40-41 37-42 39-43 38-44 42-45 47-47 38-49 45-50 48-52 49-53 50-54 +0-0 0-1 4-2 6-3 5-4 6-5 8-6 10-7 10-8 12-9 13-10 14-11 15-12 16-13 17-14 +0-0 1-1 1-2 1-3 3-4 4-5 5-6 6-7 7-8 +0-0 1-1 3-2 3-3 5-4 6-5 7-6 7-7 8-8 10-9 13-10 14-11 11-12 15-13 +0-0 1-1 3-2 3-3 5-4 6-5 7-6 8-7 8-8 9-9 10-10 15-11 13-12 11-13 14-14 16-15 16-16 18-17 18-18 19-19 21-20 +0-0 1-1 2-2 4-3 5-4 2-5 6-6 7-7 8-8 10-9 11-10 12-11 13-13 13-14 14-15 15-16 16-17 17-18 19-19 20-20 20-21 21-22 22-23 +0-0 0-1 12-2 4-3 5-4 6-5 7-6 8-7 11-8 8-10 15-11 16-12 14-13 15-14 20-15 20-16 19-17 22-18 18-19 19-20 23-21 21-22 25-23 27-24 26-25 28-26 +0-0 1-1 1-2 4-3 4-4 4-5 5-6 7-7 8-8 9-9 10-10 9-11 11-12 13-13 14-14 15-15 13-16 13-17 17-18 16-19 19-20 18-21 20-22 +0-0 4-1 2-2 5-3 5-4 5-5 7-6 4-7 9-8 10-9 11-10 16-11 16-12 16-13 21-15 20-16 20-17 23-18 22-19 24-20 25-21 +0-0 1-1 2-2 3-3 3-4 5-5 5-6 7-7 6-8 8-9 8-10 8-11 11-12 10-13 12-14 12-15 +0-0 0-1 2-2 3-3 5-4 5-5 6-6 5-7 7-9 2-10 21-11 12-12 16-13 16-15 18-16 19-17 16-18 
17-19 14-20 15-21 13-22 20-24 21-25 23-26 23-28 31-29 32-30 26-31 34-32 25-33 33-34 34-35 30-36 29-37 24-38 25-39 28-40 27-41 28-42 37-43 37-44 38-45 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 8-7 8-8 11-9 3-10 12-11 13-12 16-13 15-14 18-15 17-16 19-17 20-18 21-19 23-20 20-22 24-23 25-24 26-25 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 6-8 7-9 9-10 10-11 11-12 12-13 8-14 13-16 13-17 15-18 14-19 17-20 18-21 20-22 21-24 20-25 19-26 22-27 23-28 +0-0 1-1 2-2 3-3 2-4 3-5 5-6 5-7 6-8 9-9 9-10 8-11 10-12 11-13 12-14 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 7-7 6-8 8-9 9-10 10-11 16-12 12-13 15-14 14-15 15-16 14-17 15-18 21-19 22-20 23-21 24-22 25-23 19-24 20-25 23-26 24-27 27-29 30-30 28-32 33-33 28-34 32-35 32-36 33-37 +0-0 1-1 2-2 3-3 5-4 4-5 8-6 7-7 10-8 12-9 13-11 14-12 11-13 17-14 10-15 19-16 17-17 18-18 25-19 27-20 22-21 30-22 25-23 26-24 26-25 23-26 24-27 27-28 30-29 31-30 +0-0 0-1 1-2 4-3 5-4 4-5 3-6 3-7 6-8 9-9 10-10 11-11 12-12 7-13 12-14 12-15 13-16 15-17 20-18 12-19 28-21 19-22 18-24 28-25 39-26 15-27 24-28 25-29 25-30 27-31 26-32 32-33 29-34 34-35 31-36 32-37 33-38 31-39 37-40 38-41 30-43 39-44 39-45 +0-0 1-1 2-2 3-3 6-4 4-6 7-7 7-8 8-9 9-10 7-11 10-12 +0-0 2-1 6-2 1-3 2-4 3-5 5-6 6-7 7-8 8-9 11-11 11-12 9-13 10-14 11-15 13-16 12-17 16-18 13-19 14-20 19-22 20-23 19-24 21-25 22-26 +0-0 1-1 2-2 2-3 4-4 1-5 5-6 5-7 6-8 7-9 8-11 8-12 9-13 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 6-7 7-8 9-9 8-10 13-11 10-12 11-13 12-15 13-16 14-17 16-18 16-19 17-20 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-9 11-10 11-11 10-12 11-13 14-14 8-15 18-16 15-17 16-19 19-20 20-21 20-22 21-23 +9-0 0-1 2-2 3-3 3-4 5-5 2-6 7-7 7-8 10-9 12-10 15-12 16-14 11-15 12-16 17-17 17-18 24-19 17-20 19-21 20-22 21-23 22-24 23-25 24-26 25-27 25-28 26-29 30-30 31-31 29-32 28-33 29-35 32-36 +0-0 3-1 3-2 3-3 6-4 6-5 6-6 7-7 9-8 10-9 11-10 12-11 11-12 10-13 15-14 9-15 14-16 17-17 16-19 19-21 19-22 20-23 22-24 23-25 21-26 25-27 26-28 24-29 27-30 28-31 30-32 22-33 32-34 28-35 27-36 27-37 32-38 35-39 36-40 32-41 36-42 +0-0 1-1 3-2 2-3 4-4 4-5 5-6 8-7 5-8 9-9 6-10 12-12 12-13 17-14 12-15 16-16 19-20 19-21 20-22 22-23 23-24 +0-0 2-1 2-2 4-3 6-4 4-5 8-6 6-7 7-8 10-9 10-10 11-11 12-12 13-13 14-14 14-15 15-16 15-17 +0-0 1-1 2-2 3-3 3-4 4-5 6-6 5-7 6-8 7-9 8-10 9-11 11-12 13-13 10-14 12-15 14-16 13-17 20-18 17-19 19-20 18-21 19-22 21-23 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 7-7 8-8 9-9 9-10 10-11 11-12 +0-0 1-1 4-2 1-3 3-4 3-5 5-6 5-7 6-8 8-9 8-10 9-11 9-12 10-13 11-14 12-15 14-16 14-17 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 8-7 7-8 12-10 15-11 13-12 17-13 17-14 16-15 18-16 16-17 24-18 23-19 25-20 27-22 27-23 28-24 29-25 30-26 +1-0 2-1 4-3 6-4 7-5 8-6 10-7 8-8 11-9 11-10 5-11 15-12 16-14 25-15 26-16 16-17 20-19 22-20 22-21 25-22 24-23 25-24 27-25 27-26 28-27 27-28 29-29 28-30 31-31 34-32 41-33 37-34 38-35 37-36 40-37 41-39 +0-0 1-1 2-2 3-3 4-4 5-5 3-6 6-7 7-8 3-10 15-11 10-12 11-13 12-14 13-15 14-16 15-17 15-18 15-19 24-20 23-21 22-24 22-25 21-26 22-27 23-28 24-29 23-30 33-31 24-32 26-33 27-34 33-35 32-37 33-38 33-39 35-40 38-41 32-42 32-43 37-44 32-45 37-46 38-47 39-48 40-49 41-50 43-51 44-52 45-53 47-54 45-55 48-56 49-57 50-58 51-59 52-60 52-61 54-62 54-63 55-64 58-66 59-67 60-68 61-69 62-70 63-71 64-72 65-73 66-74 67-75 68-76 +2-0 0-1 4-2 4-3 8-4 5-5 8-7 8-8 14-9 15-11 13-12 13-13 12-14 16-15 18-16 24-19 19-20 19-21 29-22 30-23 31-24 32-25 33-26 32-27 30-28 26-29 30-30 35-32 36-34 37-35 33-36 38-37 38-38 40-39 +0-0 7-1 4-2 6-3 9-5 10-6 8-7 14-9 12-10 16-11 18-12 16-13 14-14 22-15 20-17 21-18 17-19 23-20 24-21 25-22 28-23 29-24 33-25 32-26 33-27 20-28 36-30 34-31 37-32 37-33 39-34 40-35 42-36 43-37 +0-0 1-1 
2-2 4-4 5-5 7-6 10-7 11-8 9-9 8-10 12-11 14-12 13-13 14-14 15-15 16-16 19-17 18-18 19-19 23-20 23-21 24-22 24-23 25-24 +0-0 3-1 4-2 6-3 13-4 7-5 8-6 10-7 11-8 14-9 15-10 16-11 17-12 18-13 19-14 +0-0 3-1 1-2 4-3 4-4 7-5 5-6 9-7 7-8 8-9 8-10 9-11 12-12 11-13 14-14 13-15 15-16 17-17 17-18 17-19 22-20 23-21 24-22 25-23 22-24 24-25 21-26 18-27 26-28 +0-0 1-1 1-2 3-3 4-4 2-5 7-7 7-8 7-9 8-10 8-11 10-12 7-13 11-14 8-15 12-16 14-17 11-18 15-19 15-20 18-21 16-22 18-23 18-24 19-25 21-26 22-27 +0-0 1-1 2-2 3-3 +0-0 0-1 5-2 6-3 8-5 1-6 5-7 11-9 11-10 12-11 14-13 15-14 16-15 17-16 12-17 16-18 21-19 19-20 21-21 22-22 22-23 23-24 25-25 27-26 26-27 28-28 29-29 31-30 30-31 32-32 24-33 33-34 +0-0 0-1 2-2 3-3 4-4 4-5 4-6 5-7 19-8 8-9 9-10 10-11 12-13 14-15 15-16 16-17 18-19 20-20 23-21 21-22 23-23 22-24 25-25 26-26 24-27 28-28 29-29 30-30 31-31 30-32 33-33 33-34 34-35 +2-0 2-1 3-2 4-3 5-4 6-5 7-6 9-7 10-8 10-9 12-10 14-11 10-12 20-13 17-15 19-16 22-17 24-19 22-20 22-21 26-22 29-23 30-24 38-25 28-26 9-28 33-29 36-30 36-31 38-32 39-33 40-34 41-35 43-36 44-37 44-38 44-39 46-40 +0-0 0-1 2-2 3-3 4-4 6-5 6-6 8-7 11-8 9-9 15-10 4-11 13-14 9-15 25-16 15-18 17-19 17-20 16-21 24-22 20-23 21-24 30-25 21-26 21-27 27-29 26-30 26-31 16-33 31-35 26-36 32-37 33-38 34-39 35-40 +0-0 1-1 2-2 3-3 4-4 6-5 5-6 7-7 9-8 8-9 10-10 12-11 11-12 13-13 +0-0 2-1 8-2 3-3 7-4 5-5 3-6 11-7 6-8 9-9 8-10 9-11 10-12 11-13 12-14 13-15 +0-0 1-1 2-2 2-3 4-4 5-5 5-6 6-7 7-8 8-9 +0-0 2-1 3-2 3-3 6-4 5-5 6-6 7-7 +0-0 2-2 2-3 3-4 4-5 5-6 7-7 8-8 10-9 11-10 12-11 13-12 14-13 16-14 17-15 18-16 18-17 22-18 20-19 21-20 24-21 24-22 22-23 9-24 27-25 28-26 26-28 30-29 29-30 34-31 34-32 34-33 37-34 36-35 37-36 39-37 +0-0 1-1 2-2 2-3 1-4 4-5 5-6 5-7 5-8 7-9 17-10 0-11 15-12 16-13 10-14 12-15 14-16 11-17 12-18 15-19 17-20 +0-0 3-1 3-2 4-3 5-4 +0-0 1-1 5-2 4-3 4-4 5-5 7-6 6-7 8-8 10-9 9-10 10-11 11-12 12-13 +0-0 3-1 3-2 14-3 5-5 7-6 10-7 21-9 10-10 12-11 12-12 12-13 16-14 14-15 18-16 15-17 15-18 16-19 15-20 5-21 22-22 23-23 25-24 27-25 29-26 29-28 29-29 32-31 34-32 33-33 35-34 31-35 36-36 38-37 38-38 37-39 38-40 38-41 41-42 42-43 46-44 46-45 44-46 46-47 49-48 41-49 49-50 +0-0 1-1 1-2 3-3 3-4 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 7-8 7-9 8-10 10-11 11-12 12-13 9-14 16-16 14-17 15-18 16-19 10-20 22-21 19-22 22-23 22-25 26-26 28-27 25-28 28-29 29-30 32-33 +1-0 2-1 1-2 3-3 3-4 6-5 5-6 8-7 9-8 10-9 12-10 8-11 19-12 16-14 17-15 18-16 19-17 20-18 21-19 22-20 21-21 23-22 24-23 24-24 25-25 26-26 +0-0 1-1 2-2 2-3 5-4 4-5 3-6 8-7 5-8 10-9 6-10 11-11 10-12 2-13 12-14 14-15 14-16 11-17 15-18 17-19 18-20 19-21 20-22 21-24 23-25 24-26 25-27 26-28 27-29 33-30 24-31 30-32 31-34 31-35 35-36 34-37 35-38 36-39 37-40 38-41 36-42 39-43 +0-0 1-1 3-2 4-3 8-4 7-5 10-6 13-7 5-8 15-10 12-11 13-12 13-13 17-14 19-15 19-16 21-17 25-18 21-19 25-20 26-21 27-22 28-23 31-24 31-25 34-26 28-27 35-28 37-29 36-30 36-31 40-32 41-33 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 7-7 9-8 10-9 13-11 11-12 11-13 13-14 15-15 16-16 19-17 20-18 19-19 18-20 21-21 21-22 25-23 25-24 17-25 26-26 26-27 27-28 +3-0 1-1 2-2 3-3 4-4 5-5 7-6 8-7 9-8 10-9 11-10 15-13 12-14 16-15 16-16 19-17 21-18 21-19 18-20 23-22 23-23 24-24 25-25 27-26 27-27 29-28 30-29 33-30 32-31 33-32 33-33 34-34 40-35 41-36 39-37 42-38 35-39 42-40 +0-0 1-1 4-2 3-3 5-4 6-5 7-6 8-7 11-9 10-10 9-11 14-12 17-13 9-14 15-15 20-18 23-19 20-20 28-21 24-22 21-23 22-24 23-25 27-26 27-27 29-28 31-29 34-30 35-31 33-32 33-33 37-34 37-35 39-36 36-37 40-38 39-39 44-41 45-42 45-43 46-44 +0-0 4-1 2-2 3-3 5-4 6-5 10-6 9-7 10-8 14-9 13-10 7-11 15-12 15-13 17-14 18-15 19-16 22-17 17-18 
17-19 21-20 17-21 25-22 23-23 29-24 28-26 29-27 30-28 37-30 38-31 32-32 39-33 +0-0 0-1 2-2 2-3 4-4 5-5 5-6 6-7 7-8 8-9 8-10 9-11 +0-0 2-1 3-2 4-3 2-4 5-5 6-6 18-7 9-8 10-9 10-10 12-11 12-12 14-13 16-14 12-15 13-16 20-17 22-18 18-19 20-20 21-21 21-22 23-23 26-24 27-25 30-26 29-27 30-28 32-29 +0-0 2-1 3-2 4-3 4-4 6-5 8-6 12-9 10-10 11-11 14-12 15-13 16-14 +0-0 1-1 15-2 3-3 4-4 5-5 6-6 7-7 9-9 10-10 13-11 12-12 13-13 8-14 13-15 14-16 17-17 19-19 16-20 15-21 20-22 21-23 22-24 23-25 25-26 26-27 26-28 26-29 27-30 28-31 29-32 30-33 31-34 32-35 29-36 32-37 36-38 34-39 37-40 38-41 +0-0 2-1 0-2 3-3 4-4 6-5 8-6 5-7 10-8 9-9 12-10 11-11 12-12 13-13 14-14 14-15 17-16 18-17 19-18 21-19 22-20 23-21 24-22 22-23 22-24 26-25 24-26 29-28 20-29 28-30 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 12-11 21-12 14-13 14-14 16-15 17-16 20-17 17-18 19-19 22-20 23-21 +0-0 1-1 3-2 4-3 8-4 7-5 6-6 5-7 10-8 11-9 11-10 14-11 15-12 15-13 16-14 18-15 19-16 20-17 17-18 21-19 +0-0 1-1 1-2 2-3 +0-0 3-1 5-2 5-3 7-4 9-5 6-6 10-8 10-9 11-10 12-11 15-12 16-13 +0-0 0-1 1-2 2-3 3-4 4-5 5-6 6-7 13-8 12-9 12-10 16-11 18-12 17-13 20-14 21-15 23-16 19-17 23-18 23-19 25-20 24-21 24-22 28-23 +0-0 0-1 1-2 5-3 4-4 5-5 2-6 3-7 4-8 16-9 9-10 13-11 12-12 15-13 13-14 13-15 14-16 17-17 18-18 21-19 21-20 21-21 22-22 +0-0 2-1 3-2 6-3 5-4 5-5 2-6 7-7 8-8 9-9 10-10 9-11 11-12 11-13 12-14 13-15 15-16 15-17 +0-0 1-1 2-2 11-3 2-4 5-5 5-6 6-7 12-8 9-9 11-10 10-11 12-12 12-13 13-14 16-15 17-16 15-17 14-18 18-19 19-20 21-21 22-22 19-23 20-24 23-25 25-26 26-27 27-28 30-29 24-30 28-31 30-32 31-33 35-34 34-35 34-36 36-37 38-39 39-40 40-41 41-42 42-43 43-44 +0-0 1-1 2-2 5-3 4-4 6-5 7-6 6-7 8-9 11-10 12-11 13-12 10-13 15-14 9-15 18-16 15-17 15-18 16-19 17-20 18-21 +0-0 1-1 2-2 3-3 3-4 4-5 +0-0 1-1 2-2 7-3 5-4 4-5 8-6 4-7 9-8 12-9 10-10 15-11 14-12 16-14 17-15 18-16 23-18 21-19 22-20 21-21 21-22 26-23 25-24 28-25 29-26 30-27 30-29 25-30 32-31 33-32 +0-0 1-1 2-2 3-3 2-4 3-5 4-6 5-7 6-8 7-9 8-10 9-11 11-12 10-13 12-14 14-15 14-16 15-17 16-18 17-19 16-20 17-21 20-22 21-23 +0-0 3-1 2-2 3-3 8-4 9-5 12-6 11-7 11-8 13-9 14-10 15-11 16-12 +0-0 1-1 2-2 1-3 4-4 6-5 7-6 3-7 9-8 8-9 10-10 15-12 16-13 17-14 17-15 18-16 17-17 21-18 22-20 23-21 24-22 26-23 28-25 26-26 25-27 28-28 29-29 31-30 31-31 33-32 33-33 +0-0 2-1 3-2 2-3 5-4 6-5 7-6 4-7 6-9 9-10 12-11 11-12 13-13 14-14 14-15 16-16 16-17 15-18 18-19 19-20 20-21 20-22 20-23 23-24 24-25 23-26 24-27 26-28 27-29 28-30 +0-0 3-1 4-2 5-3 6-4 +0-0 1-1 2-2 5-3 5-4 6-5 7-6 8-7 +0-0 1-1 4-2 5-3 4-4 9-5 9-6 8-7 9-8 9-9 13-10 12-11 13-12 14-13 9-14 16-15 17-16 +0-0 1-1 1-2 2-3 3-4 4-5 4-6 5-7 +0-0 1-1 2-2 4-3 +0-0 1-1 3-3 4-4 7-5 8-6 9-7 10-8 11-9 12-10 13-11 14-12 15-13 16-14 18-15 18-16 20-17 21-18 22-19 21-20 23-21 21-22 24-23 24-24 26-25 27-26 28-27 29-28 +0-0 1-1 3-2 4-3 6-4 6-5 9-6 +0-0 4-1 2-2 2-3 5-4 8-5 9-6 8-7 10-9 12-10 11-11 15-12 12-13 18-14 20-15 17-16 20-17 21-18 20-19 23-20 25-21 +0-0 1-1 2-2 4-3 5-4 8-5 9-6 10-7 11-8 12-9 12-10 14-11 15-12 16-13 17-14 +0-0 0-1 1-2 4-3 4-4 5-5 2-6 7-7 7-8 8-9 10-10 8-11 10-12 7-13 12-14 11-15 12-16 15-17 15-18 16-19 17-20 15-21 22-22 16-23 26-25 20-26 22-27 26-28 22-29 23-30 24-31 25-32 26-33 29-35 31-36 31-37 31-38 32-39 32-40 33-41 36-43 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 7-8 8-9 9-10 10-11 +0-0 0-1 2-2 4-3 4-4 6-5 6-6 8-7 9-8 +2-0 3-1 4-2 4-3 7-4 8-5 5-6 9-7 10-8 9-9 11-10 14-11 13-12 14-13 14-14 21-17 21-18 21-19 23-20 23-21 24-22 25-23 27-24 29-25 27-26 19-27 29-28 36-29 27-30 36-31 35-32 37-33 40-34 40-35 41-36 41-37 +0-0 0-1 1-2 3-3 4-4 1-5 6-6 7-7 6-8 12-9 13-10 10-11 10-12 
11-13 14-14 15-15 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 7-7 6-8 8-9 9-10 11-11 11-12 +0-0 4-1 1-2 4-5 2-6 3-7 3-8 6-9 8-10 8-11 11-12 10-13 9-14 21-15 12-16 14-18 22-19 15-20 18-21 16-22 17-23 20-24 25-26 26-27 25-28 23-29 24-30 28-31 51-32 28-33 29-35 31-36 32-37 33-38 33-39 35-40 36-41 39-42 38-43 39-44 42-45 43-46 44-47 48-49 47-50 46-51 47-52 49-53 47-54 46-55 50-56 50-57 53-58 54-59 55-60 58-61 56-62 51-63 59-64 +0-0 1-1 2-2 3-3 4-4 3-5 5-6 6-7 7-9 8-10 9-11 14-12 13-13 10-14 13-15 14-16 14-17 16-18 17-19 18-20 14-21 18-22 19-23 20-24 21-25 +0-0 1-1 4-2 3-3 3-4 6-5 7-6 8-7 5-8 9-9 11-10 12-11 4-12 9-13 13-15 14-16 14-17 15-18 19-21 21-22 22-23 19-24 20-25 24-26 28-27 23-28 24-29 25-30 26-31 26-32 26-33 27-34 33-35 29-36 30-37 28-38 32-39 32-40 32-41 34-42 28-43 36-44 +0-0 1-1 8-2 5-3 5-4 3-5 16-8 18-9 19-10 9-11 11-12 12-13 13-14 14-15 16-16 17-17 20-18 22-19 25-20 21-21 23-22 24-23 25-24 26-25 29-26 30-27 29-28 33-29 31-30 26-31 29-32 42-33 34-34 40-36 34-37 39-38 41-39 41-40 35-41 37-43 43-44 46-45 36-46 47-47 49-48 +0-0 1-1 2-2 2-3 5-4 7-5 4-6 8-7 9-8 3-9 10-10 10-11 13-12 12-13 19-14 17-16 19-17 20-18 18-19 21-20 22-21 20-22 24-23 23-24 28-25 28-26 27-27 31-28 31-29 32-30 33-31 34-32 36-33 +0-0 3-1 2-2 5-3 5-4 4-5 8-6 9-7 10-8 11-9 12-10 13-11 3-12 15-13 15-14 15-15 17-16 20-17 18-18 23-19 24-20 23-21 25-22 26-23 27-24 28-25 25-26 30-28 30-29 31-30 31-31 34-32 34-33 35-34 +0-0 1-1 1-2 2-3 4-4 4-5 5-6 7-7 9-8 8-9 10-10 9-11 13-12 12-14 15-15 16-16 16-17 22-18 17-19 24-20 21-21 26-22 20-23 21-24 24-25 19-26 25-27 27-28 +0-0 0-1 3-2 2-3 5-5 6-6 6-7 8-9 12-10 11-11 14-12 16-13 15-14 16-15 19-16 19-17 18-18 20-19 +0-0 1-1 2-2 3-3 8-4 4-5 9-6 5-7 11-8 14-9 10-10 16-11 13-12 10-13 12-14 12-15 13-17 7-18 17-19 17-20 +1-0 2-1 4-2 4-3 7-4 7-5 9-6 10-7 8-8 13-9 14-10 16-11 11-12 15-13 16-14 17-15 18-16 21-17 21-18 +0-0 2-1 4-2 5-3 6-4 7-5 8-6 9-7 10-8 +0-0 0-1 2-2 3-3 4-4 5-5 6-6 9-7 8-8 14-9 12-10 11-11 10-12 11-13 15-14 +0-0 2-1 3-2 4-3 6-4 6-5 9-6 10-7 +0-0 2-1 1-2 4-3 3-4 6-5 8-6 10-7 11-8 9-9 13-10 12-11 15-12 16-13 17-14 18-15 19-16 +1-0 1-1 0-2 3-3 4-4 4-5 19-6 7-7 12-8 10-9 9-10 8-11 12-12 13-13 13-14 18-15 18-16 16-17 17-18 18-19 19-20 23-21 18-22 29-23 36-24 25-25 26-26 27-27 30-28 36-29 39-31 30-32 50-33 36-34 36-35 41-36 42-37 41-38 46-39 50-40 43-41 49-42 48-43 22-44 33-45 51-47 52-48 +0-0 3-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 25-12 12-13 17-14 15-15 16-16 16-17 19-18 20-19 22-20 23-21 24-22 25-23 26-24 27-25 27-26 29-27 30-28 30-29 30-31 32-32 34-33 33-34 35-35 37-36 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 4-7 8-9 10-10 11-11 9-12 12-13 15-15 16-17 16-18 18-19 16-21 21-23 22-24 23-25 24-26 23-27 27-29 28-30 29-31 32-33 31-34 33-35 34-36 35-37 33-38 38-39 39-40 35-41 35-42 36-43 39-44 40-45 39-46 37-47 39-48 42-49 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 8-7 10-8 11-9 12-10 11-11 15-12 15-13 18-14 19-15 20-16 21-17 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 7-8 9-9 9-10 10-11 13-12 12-13 13-14 15-15 16-16 16-17 17-18 18-19 +0-0 1-1 2-2 2-3 5-4 4-5 8-7 9-8 9-9 10-10 9-11 11-12 11-13 13-14 12-15 20-17 22-18 9-19 15-20 17-21 17-22 17-23 18-24 22-27 23-28 23-29 23-30 25-31 26-32 27-33 24-34 26-35 28-36 +0-0 1-2 2-3 3-4 5-5 7-6 8-7 9-8 9-9 11-10 12-11 +0-0 3-1 2-2 2-3 4-4 4-5 5-6 8-7 8-8 9-9 5-10 10-11 10-12 12-13 13-14 14-15 16-16 15-17 16-18 17-19 18-20 19-21 20-22 +0-0 3-2 3-3 6-4 8-5 9-6 9-7 10-8 11-9 13-10 19-11 11-12 18-13 22-14 22-15 24-17 +0-0 1-1 1-2 5-3 4-4 2-5 9-7 9-8 10-9 11-10 10-11 4-12 11-13 14-14 14-15 13-16 16-17 18-18 20-20 22-21 22-22 22-23 23-24 21-25 24-26 23-27 25-28 27-29 25-30 26-31 29-32 
+0-0 0-1 3-2 4-3 5-4 6-5 7-6 7-7 9-8 +0-0 1-1 1-2 2-3 3-4 4-5 4-6 5-7 5-8 +1-0 1-1 2-2 3-3 4-4 4-5 8-6 7-7 5-8 6-9 8-10 9-11 10-12 12-13 12-14 13-15 12-16 14-17 +1-0 2-1 0-2 3-3 4-4 5-5 6-6 7-7 7-8 9-9 10-10 11-11 12-13 15-14 13-15 14-16 16-17 16-18 17-19 18-20 +0-0 0-1 2-2 5-3 3-4 3-5 4-6 8-8 6-9 8-10 9-11 11-12 11-13 12-14 14-15 14-16 15-17 16-18 +0-0 1-1 2-2 3-3 7-4 5-5 6-6 6-7 8-8 8-9 7-10 10-11 11-12 12-13 9-14 13-15 14-16 16-17 17-18 17-19 19-20 20-21 26-22 22-24 21-25 23-26 25-27 24-28 24-29 23-30 27-31 +0-0 2-1 3-2 7-3 6-4 7-5 8-6 9-7 10-8 10-9 12-10 13-11 16-12 16-13 18-14 19-15 21-16 +0-0 1-1 2-2 5-3 6-4 4-5 4-6 3-7 5-8 5-9 9-10 8-11 9-12 8-13 10-14 10-15 11-16 13-18 15-19 14-20 12-21 17-22 18-23 19-24 20-25 23-26 24-27 25-29 26-30 27-31 26-32 26-33 27-34 29-35 +0-0 4-1 3-2 3-3 5-4 5-5 7-6 6-7 6-8 11-9 9-10 13-11 11-13 12-14 4-15 13-16 16-18 15-19 16-20 17-21 18-22 19-23 20-24 +0-0 1-1 3-2 4-3 4-4 6-5 5-6 4-7 8-8 9-9 +0-0 1-1 3-2 2-3 4-4 4-5 25-6 24-7 3-8 28-9 7-10 12-11 29-12 14-13 11-14 12-15 13-16 5-17 7-18 15-19 19-20 20-21 8-22 26-23 21-24 23-25 23-26 27-27 16-28 18-29 26-30 21-31 20-32 22-33 31-34 31-35 32-36 +2-0 2-1 8-2 3-3 3-4 4-5 6-6 6-7 9-8 10-9 10-10 12-11 13-12 14-13 14-14 10-15 16-16 17-17 17-18 18-19 23-20 21-22 22-23 25-24 25-25 25-26 26-27 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-8 8-9 10-10 10-11 11-12 12-13 +0-0 1-1 2-2 3-3 5-4 7-5 8-6 10-7 9-8 15-9 11-10 11-11 9-12 13-13 14-15 16-16 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 8-7 8-8 10-9 11-10 +0-0 1-1 1-2 2-3 3-4 3-5 4-6 6-7 5-8 6-9 8-10 8-11 9-12 10-13 +0-0 6-1 5-2 2-3 3-4 7-5 8-6 5-7 9-8 7-9 11-10 9-11 12-12 16-14 15-15 15-16 29-17 17-18 18-19 20-20 27-21 17-22 27-23 23-24 27-25 24-26 25-27 28-28 25-29 26-30 28-31 30-32 31-33 38-34 34-35 38-36 35-37 36-38 41-39 41-40 38-41 41-42 40-43 44-44 28-45 46-46 49-47 49-48 50-49 +0-0 1-1 2-2 3-3 3-4 4-5 7-6 9-7 10-8 11-9 12-10 11-11 12-12 16-13 12-14 13-15 14-16 15-17 16-18 18-19 18-20 19-22 21-23 22-24 24-25 23-26 25-27 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 6-7 9-8 8-9 10-10 +0-0 3-1 5-2 1-3 3-4 4-5 9-7 8-8 9-9 10-10 11-11 13-12 14-13 15-14 17-16 18-17 16-18 20-19 23-20 23-22 25-23 29-24 27-25 28-26 28-27 30-28 31-29 32-30 +0-0 2-1 3-2 4-3 5-4 7-5 8-6 9-7 10-8 11-9 12-12 13-13 14-14 17-15 18-16 19-17 21-18 22-19 22-20 23-21 24-22 26-23 27-24 27-25 29-26 29-27 30-28 +0-0 1-1 2-2 3-3 4-4 4-5 6-6 5-7 5-8 7-9 8-11 9-13 10-15 12-16 13-17 12-18 10-19 14-20 15-21 +1-0 1-1 1-2 3-3 4-4 5-5 6-6 7-7 8-8 +3-0 0-1 3-2 3-3 4-4 5-5 5-6 6-7 7-8 +0-0 1-1 2-2 3-3 5-4 4-5 6-6 8-7 9-8 10-9 11-10 11-11 13-12 14-13 18-14 15-15 15-16 16-17 16-18 19-19 19-20 +3-0 4-1 3-2 4-3 1-4 2-5 7-6 10-7 9-8 10-9 11-10 11-11 13-13 13-14 13-15 16-16 15-17 17-18 18-19 22-20 8-21 38-22 24-23 23-25 23-26 25-27 26-28 29-30 30-31 33-32 34-33 36-34 26-35 36-36 38-37 +0-0 2-1 1-2 1-3 15-5 16-6 10-7 10-8 9-9 11-10 10-11 11-12 14-13 13-14 24-15 14-16 18-17 7-18 14-19 18-20 20-21 22-22 20-23 13-24 24-25 25-26 +0-0 1-1 2-2 3-3 4-4 4-5 6-6 7-7 8-8 8-9 11-10 12-11 13-13 37-14 14-15 15-16 16-17 17-18 18-19 19-20 20-21 21-22 21-23 22-24 23-25 24-26 25-27 27-28 26-29 31-30 28-31 29-32 30-33 32-34 33-35 34-36 36-37 37-38 38-39 40-40 9-41 39-42 39-43 42-44 +0-0 1-1 3-2 3-3 4-4 5-5 6-6 5-10 18-11 10-12 10-13 12-14 14-15 14-16 17-17 14-18 18-19 19-20 21-21 22-23 25-25 23-26 27-27 22-29 27-30 28-31 24-32 +0-0 1-1 2-2 5-3 2-4 4-5 3-6 8-7 7-8 11-10 11-11 12-12 8-13 12-14 12-15 13-16 +0-0 1-1 3-2 4-4 5-5 7-6 6-7 8-8 9-9 10-10 11-11 12-12 13-13 14-14 15-15 8-16 17-17 18-18 +0-0 1-1 3-2 4-3 4-4 5-5 9-6 7-7 7-8 9-9 10-10 11-11 +0-0 1-1 1-2 4-3 5-4 5-5 2-6 3-7 
7-8 10-9 9-10 7-11 9-12 14-13 14-14 13-15 16-16 12-17 11-18 17-19 +0-0 1-1 2-2 3-3 3-4 8-5 8-6 8-7 9-8 10-9 5-10 14-11 15-12 16-13 16-14 18-15 19-16 20-17 22-18 22-19 23-20 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 7-7 11-8 11-9 11-10 9-11 16-12 18-15 20-16 21-17 22-18 18-19 24-20 24-21 25-22 27-24 28-25 29-26 30-27 31-28 32-29 33-30 36-31 28-32 36-33 38-34 37-35 40-36 +0-0 0-1 2-2 2-3 4-4 5-5 6-6 7-7 7-8 8-9 +0-0 1-1 2-2 3-3 4-4 7-5 8-6 9-7 10-8 9-9 11-10 13-11 12-12 16-13 17-14 18-15 19-16 20-17 14-18 21-19 24-20 21-21 22-22 25-23 +0-0 3-1 2-2 2-3 5-4 4-5 4-6 6-7 5-8 9-9 9-10 10-11 11-12 12-13 13-14 12-15 14-16 14-17 15-18 +0-0 1-1 1-2 3-3 4-4 2-5 3-6 8-7 9-8 7-9 11-10 10-11 12-12 19-13 20-14 21-15 14-16 15-17 16-18 18-19 17-20 18-21 19-22 21-23 22-24 +0-0 1-1 1-2 3-3 3-4 3-5 4-6 +0-0 1-1 3-2 5-3 6-4 7-5 6-6 9-7 8-8 5-9 12-10 13-11 14-12 15-13 16-14 17-15 18-16 19-17 20-18 18-19 24-20 26-21 24-22 24-23 23-24 9-25 26-26 27-27 +0-0 1-1 2-2 4-3 5-4 14-5 6-6 18-7 12-8 12-9 12-10 12-11 14-12 15-13 10-14 17-15 9-16 19-17 20-18 21-19 +0-0 1-1 3-2 1-3 5-4 6-5 7-6 10-7 2-8 8-9 8-10 11-12 11-13 15-14 15-15 15-16 15-17 19-18 19-19 17-20 21-21 17-22 18-23 24-24 22-25 25-26 25-27 26-28 27-30 30-31 28-32 29-33 30-34 33-35 37-36 38-37 39-38 40-41 41-42 +0-0 1-1 2-2 2-3 2-4 3-5 4-6 5-7 5-8 6-9 +0-0 1-1 1-2 2-3 2-4 3-5 5-6 7-7 10-8 7-9 8-10 9-11 10-12 12-13 13-14 15-15 15-16 17-17 16-18 19-19 21-21 23-22 23-24 23-25 25-26 26-27 27-28 23-29 24-30 28-31 31-32 31-33 32-34 33-35 31-36 34-37 29-39 36-40 +0-0 1-1 2-2 4-3 6-4 9-5 6-6 10-7 7-9 10-10 11-11 11-12 11-13 12-14 15-15 14-16 16-17 12-19 14-20 13-21 21-22 23-23 14-25 26-26 29-27 26-28 22-29 27-30 27-31 27-32 32-33 32-34 31-35 33-36 19-37 37-38 38-39 39-40 38-41 42-42 37-43 43-44 44-45 44-46 42-47 45-48 46-49 +0-0 1-1 1-2 2-3 3-4 6-5 6-6 7-7 8-8 9-9 10-10 10-11 21-13 10-14 21-15 14-16 18-18 19-19 16-20 23-22 25-23 26-24 26-25 27-26 27-27 29-28 30-29 29-30 32-31 33-32 34-33 29-34 32-35 35-36 39-39 40-40 29-41 43-42 44-43 45-44 38-46 46-48 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 7-7 8-8 +0-0 1-1 12-2 2-4 7-5 9-6 6-7 8-8 9-9 11-10 13-11 4-12 15-13 14-14 17-15 16-16 18-17 20-18 20-19 21-20 22-21 23-22 24-23 26-24 27-25 31-26 32-27 33-28 34-29 +0-0 1-1 2-2 5-3 5-4 4-5 4-6 4-7 7-8 11-9 8-10 8-11 9-12 10-13 10-14 12-15 13-16 14-17 +0-0 2-1 3-2 4-3 5-4 5-5 7-6 6-7 8-8 6-9 9-10 10-11 11-12 12-13 13-14 +0-0 2-1 3-2 5-3 8-4 7-5 15-6 13-7 9-8 10-9 11-10 16-11 +0-0 1-1 1-2 2-3 4-4 6-5 6-6 9-7 11-9 12-10 13-11 16-12 14-13 17-14 16-15 16-16 14-17 19-18 20-19 21-20 22-21 +0-0 1-1 2-2 3-3 5-4 6-5 8-6 8-7 9-8 10-9 10-10 15-11 16-12 18-13 14-14 19-15 12-17 20-18 27-20 21-21 23-22 23-23 25-24 26-25 25-26 30-27 30-28 31-29 30-30 32-31 33-32 30-33 37-35 +0-0 1-1 1-2 4-4 3-5 4-6 6-7 5-8 10-9 11-10 12-11 12-12 8-13 13-14 15-15 14-16 15-17 15-18 16-19 16-20 18-21 19-22 26-23 29-24 25-25 26-27 30-28 26-29 30-30 29-31 32-32 31-33 34-34 34-35 35-36 31-38 38-39 41-40 37-42 43-43 44-44 45-45 +0-0 1-1 2-2 4-3 3-4 3-5 8-6 9-7 8-8 10-9 11-10 12-11 16-12 14-13 13-14 14-15 14-16 24-18 18-19 18-21 20-22 19-23 21-24 22-25 19-26 20-27 26-30 25-31 26-32 28-33 35-35 34-36 31-37 32-38 38-39 34-40 37-41 39-42 39-43 40-44 +0-0 1-1 2-2 3-3 3-4 3-5 4-6 6-7 7-8 8-9 9-10 9-11 11-12 10-13 13-14 12-15 14-16 +0-0 1-1 4-2 4-3 4-4 4-5 5-6 6-7 9-8 10-9 11-10 12-11 12-12 15-13 16-14 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 8-7 9-8 9-9 11-10 12-11 13-12 16-14 15-15 17-16 18-17 23-18 21-19 22-20 24-21 25-22 26-23 27-24 28-25 29-26 +0-0 0-1 2-2 2-3 2-4 4-5 4-6 5-7 6-8 8-9 8-10 7-11 8-12 11-14 12-15 14-16 6-17 14-18 13-19 14-20 20-22 18-23 19-24 
20-25 20-26 21-27 +0-0 1-1 2-2 4-3 5-4 5-5 6-7 7-8 11-9 12-10 11-11 12-12 13-13 16-14 17-15 18-16 14-17 19-18 19-19 19-20 39-21 28-22 23-23 25-24 24-25 23-27 24-28 20-29 31-30 30-31 38-33 32-34 39-37 40-38 39-39 43-40 42-41 44-42 42-43 42-44 42-45 36-46 45-47 +0-0 1-1 3-2 2-3 4-4 6-5 6-6 6-7 8-8 10-9 13-10 9-11 13-12 15-13 14-14 15-15 16-16 19-17 19-18 31-19 21-22 28-25 27-26 30-27 29-28 31-29 +0-0 1-1 2-2 4-3 6-4 4-5 5-6 7-7 7-8 8-9 14-10 10-11 17-12 20-13 19-14 18-15 11-16 21-17 20-18 23-19 19-20 26-22 24-25 31-26 28-27 31-28 21-29 22-30 30-31 32-33 31-34 36-35 34-36 36-37 37-39 37-40 39-41 40-43 41-44 42-45 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 6-7 6-8 5-9 8-10 9-11 10-12 12-13 14-14 13-15 14-16 15-18 15-19 +0-0 3-1 2-2 1-3 4-4 10-5 9-6 12-7 7-8 6-9 10-10 11-11 12-12 12-13 10-14 13-15 14-16 16-18 18-19 21-20 21-22 22-23 23-24 26-25 25-26 27-27 27-28 28-29 29-30 30-31 +0-0 1-1 1-2 2-3 2-4 3-5 8-6 9-7 8-8 9-9 12-10 12-11 14-12 14-13 16-15 17-16 18-17 16-18 21-19 21-20 23-21 21-22 23-23 13-24 25-25 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 5-7 6-8 7-9 +1-1 3-2 2-3 6-4 9-5 10-6 10-7 11-9 8-10 11-11 11-13 13-14 13-15 15-16 16-17 18-18 21-20 19-21 18-23 24-24 30-26 25-28 26-29 30-30 29-31 30-32 33-33 32-34 33-35 29-36 36-37 37-38 38-39 39-40 40-41 41-42 42-43 43-44 44-45 29-46 46-47 +0-0 1-1 4-2 2-3 9-4 7-6 5-7 6-8 5-9 7-10 10-11 11-13 12-14 13-15 16-16 13-17 15-18 14-19 16-20 23-22 25-23 20-25 19-26 21-27 22-28 23-29 27-31 29-32 29-33 30-35 35-38 36-40 36-41 26-42 37-43 +0-0 1-1 3-2 4-3 5-4 6-5 5-6 9-7 10-8 3-9 13-10 12-11 14-12 14-13 16-14 17-15 18-16 19-17 19-18 21-19 14-20 21-21 22-22 23-23 24-24 25-25 26-26 27-27 28-28 29-29 30-30 31-31 32-32 +0-0 1-1 1-2 7-3 4-4 8-5 8-7 6-8 7-9 8-10 12-12 5-13 7-14 13-15 13-16 14-17 14-18 15-20 15-21 23-22 18-23 19-24 19-25 20-26 21-27 22-28 22-29 25-30 26-31 26-32 27-33 +0-0 1-1 2-2 4-4 5-5 7-7 2-8 9-9 10-10 18-11 10-12 11-13 12-14 14-15 14-16 16-17 17-18 18-19 17-20 21-21 18-22 22-23 23-24 24-26 26-27 29-29 30-30 31-31 32-32 33-33 32-34 34-35 35-36 38-37 39-38 37-39 42-41 41-42 44-43 43-44 45-45 46-46 47-47 48-48 +0-0 1-1 2-2 3-3 5-4 4-5 1-6 7-7 9-8 9-10 10-11 11-12 12-13 6-14 13-15 14-16 15-17 16-18 17-19 18-20 22-21 23-22 19-23 20-24 26-25 24-26 30-27 25-28 30-30 31-32 30-33 32-34 31-35 34-36 34-37 37-38 35-39 35-41 35-42 39-43 40-44 +0-0 1-1 2-2 3-3 2-4 4-5 5-6 5-7 8-8 7-9 10-10 11-11 9-12 9-13 12-14 10-15 15-16 14-17 15-18 16-19 17-20 +0-0 1-1 3-3 2-4 4-5 5-6 7-7 7-8 9-9 8-10 10-11 +0-0 1-1 5-2 3-3 4-4 7-5 14-6 5-7 16-8 11-9 12-10 13-11 10-12 17-13 20-14 15-15 23-16 22-17 28-18 25-19 23-20 25-21 27-22 27-23 29-24 33-25 32-26 35-27 34-28 36-29 +0-0 1-1 5-3 4-4 6-5 7-6 4-7 7-8 8-9 10-10 9-11 13-12 11-13 15-14 17-15 14-16 14-17 20-18 18-19 17-20 19-22 21-23 22-24 +0-0 1-2 3-3 2-4 3-5 5-6 6-7 7-8 11-9 9-10 10-11 11-12 13-13 11-14 15-15 15-16 16-17 17-18 17-19 17-20 19-21 20-22 22-23 23-24 24-25 +0-0 1-1 2-2 4-3 5-4 8-5 6-6 7-7 9-8 10-9 11-10 9-11 12-12 13-13 17-14 15-15 16-16 17-17 19-18 20-19 21-20 22-21 23-22 25-23 +0-0 1-1 2-2 4-3 5-4 9-5 8-6 10-7 13-8 17-9 15-10 16-11 18-12 19-13 20-14 19-15 22-16 22-17 22-18 24-19 27-20 28-21 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 7-7 10-8 9-9 13-10 11-11 12-12 14-13 15-14 15-15 17-16 18-17 18-18 19-19 20-20 20-21 21-22 25-23 25-24 22-25 26-26 +0-0 1-1 2-2 4-3 5-4 6-5 10-6 10-7 7-8 3-9 9-10 8-11 11-12 12-13 13-14 14-15 15-16 15-17 18-18 18-19 19-20 19-21 23-22 20-23 24-24 25-25 +0-0 1-1 2-2 3-3 4-4 8-6 8-7 10-8 11-9 13-10 14-11 16-12 16-13 18-14 18-15 19-16 21-17 20-18 24-19 25-20 +0-0 3-1 3-2 4-3 5-4 6-5 7-6 5-7 2-8 8-9 12-10 12-11 14-12 16-13 
15-14 15-15 16-16 10-17 17-18 20-20 10-21 19-22 23-23 24-24 25-25 26-26 27-27 28-28 27-29 30-30 31-31 +0-0 1-1 3-2 4-3 5-4 7-5 8-6 6-7 9-8 10-9 11-10 9-11 19-12 11-13 18-14 14-15 21-16 21-17 25-18 26-19 21-20 17-21 24-22 34-24 33-27 24-28 27-29 15-30 21-31 27-32 12-33 32-34 30-35 32-36 32-37 32-38 33-39 34-40 27-41 35-42 38-43 38-44 +0-0 3-1 3-2 1-3 5-4 4-5 5-6 8-7 9-8 10-9 12-10 13-11 13-12 16-13 16-14 18-15 17-16 20-17 21-18 22-19 23-20 25-21 26-22 26-23 26-24 28-25 29-26 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 +0-0 1-1 1-2 3-3 5-4 6-5 7-6 8-7 9-8 10-9 11-10 +0-0 1-1 1-2 3-3 5-4 6-5 7-6 8-7 9-8 8-9 11-10 12-11 13-12 12-13 15-14 16-15 17-16 18-17 11-18 28-20 22-21 19-22 24-24 22-25 23-26 20-27 24-28 25-29 27-30 27-31 26-32 28-33 29-34 31-35 28-36 34-37 35-38 +0-0 1-1 1-2 3-3 4-4 5-5 2-6 5-7 5-8 8-9 12-10 11-11 8-12 12-13 12-14 14-15 13-16 15-18 13-19 17-20 13-21 18-22 19-23 20-24 20-25 21-26 22-27 +0-0 0-1 2-2 3-3 5-4 5-5 6-6 7-7 8-8 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 14-12 16-13 16-14 17-15 17-16 19-18 21-19 23-20 29-23 29-25 29-26 32-27 32-28 32-29 31-30 36-33 42-35 42-36 42-37 38-40 51-41 47-42 50-43 51-44 47-45 53-46 55-48 56-49 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 7-7 7-8 5-9 9-10 14-13 10-14 14-15 15-16 16-17 18-18 16-19 17-20 19-21 20-23 23-24 17-25 23-26 26-27 24-28 27-29 27-30 28-31 +1-2 1-3 3-5 6-6 4-7 7-8 7-9 11-10 8-11 9-12 12-13 11-14 11-15 13-16 14-17 14-18 19-19 20-20 20-21 18-22 19-23 20-24 22-25 22-26 22-27 24-28 25-29 26-30 28-31 30-33 28-34 32-35 30-36 34-37 34-38 34-39 37-40 36-41 35-42 38-43 39-44 +0-0 2-2 1-3 2-4 3-5 5-6 4-7 5-8 6-9 8-10 9-11 13-14 13-15 26-16 16-17 17-18 9-20 11-21 10-22 12-23 20-24 18-25 18-26 20-28 21-29 21-30 21-31 22-32 24-33 27-34 24-35 27-36 28-37 29-38 23-39 28-40 28-41 21-42 32-43 35-46 36-47 37-48 36-49 38-50 39-51 39-53 +0-0 0-1 2-2 2-3 4-4 +0-0 1-1 2-2 4-4 4-5 5-6 7-7 7-8 8-9 9-10 10-11 10-12 11-13 12-14 13-15 15-16 16-17 17-18 14-19 18-20 +1-0 1-1 1-2 0-3 2-4 3-5 4-6 5-7 5-8 8-10 9-11 9-12 10-13 10-14 11-15 11-16 13-17 15-18 14-19 22-22 17-23 16-24 17-25 19-26 21-27 20-28 21-29 22-30 +0-1 1-2 2-3 2-4 3-5 3-6 6-7 6-8 9-9 9-10 9-11 9-12 11-13 13-14 12-15 14-16 12-17 15-19 18-20 18-21 22-22 20-23 20-24 23-26 +1-0 1-1 0-2 1-3 6-4 8-5 9-6 5-7 7-8 7-9 9-10 9-11 14-12 2-13 11-14 12-15 5-16 13-17 15-19 16-20 17-21 17-22 19-23 21-24 21-25 22-26 20-27 23-28 23-29 24-30 25-31 26-32 27-33 +0-0 2-1 3-2 4-3 5-4 6-5 7-6 +0-1 1-2 2-3 3-4 4-5 5-6 5-7 8-8 12-9 7-11 8-12 9-13 10-15 12-16 13-17 13-18 17-20 15-22 17-23 16-24 19-25 18-26 17-27 20-28 +0-0 0-1 1-2 3-3 4-4 6-5 9-6 7-7 6-8 9-9 9-10 7-11 13-12 14-13 14-14 13-15 12-16 16-17 15-18 17-19 18-20 18-21 19-22 20-23 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 9-7 6-9 6-10 13-11 12-12 6-13 13-14 16-15 17-16 18-17 23-21 23-22 22-23 23-24 24-25 23-26 26-27 25-28 30-29 28-30 29-31 30-32 30-33 29-34 31-35 34-36 33-37 35-39 +0-0 0-1 2-2 3-3 3-4 4-5 5-6 6-7 6-8 8-9 9-10 9-11 10-12 +0-0 0-1 3-2 4-3 4-5 5-6 15-7 8-8 10-10 10-11 6-12 13-13 13-14 14-15 15-16 18-17 15-18 21-19 17-20 21-21 22-22 +0-0 1-1 1-2 2-3 4-4 4-5 2-6 7-7 8-8 9-9 10-10 11-11 11-12 12-13 13-14 14-15 16-16 15-17 17-18 +0-0 1-1 2-2 5-3 4-4 6-5 +0-0 2-1 5-2 4-3 5-4 8-5 7-6 3-7 12-8 15-9 14-10 17-11 18-12 19-13 +0-0 1-1 8-2 3-3 6-4 6-5 8-6 9-7 +0-0 0-1 1-2 4-3 3-4 3-5 5-6 7-8 6-9 7-10 9-11 13-12 10-13 11-14 12-15 13-16 15-17 15-18 17-19 +1-0 3-1 0-2 0-3 4-4 1-5 6-6 4-7 9-8 10-9 7-10 9-11 15-12 12-13 13-14 7-15 13-16 13-17 9-18 17-19 21-20 17-21 19-22 19-23 20-24 21-25 19-26 19-27 22-28 23-29 24-30 +0-0 1-1 3-2 8-3 3-4 7-5 
8-6 8-7 1-8 8-9 13-10 13-11 2-12 5-13 7-14 16-15 12-16 20-17 12-18 19-19 20-21 17-22 19-24 26-25 26-26 24-27 24-28 24-29 27-30 22-31 22-32 22-33 30-34 32-35 34-36 35-37 32-38 32-39 38-40 33-41 38-43 39-44 40-45 41-46 42-47 43-50 49-52 46-55 46-56 49-57 50-58 51-59 51-60 51-61 52-62 53-63 54-64 55-65 +0-0 1-1 2-2 4-3 5-4 6-5 6-6 7-7 7-8 8-9 9-10 10-11 10-12 11-13 14-14 14-15 15-16 +0-0 0-1 3-2 4-3 5-4 6-5 7-6 +0-0 1-1 1-2 4-3 5-4 6-5 7-6 8-7 2-8 10-9 11-10 +0-0 0-1 2-2 1-3 1-4 6-5 5-6 5-7 4-8 6-9 8-10 10-11 11-12 13-13 14-14 14-15 15-16 +0-0 1-1 3-2 4-3 6-4 8-5 3-6 5-7 9-8 9-9 11-10 11-11 11-12 10-13 9-14 13-15 14-16 14-17 15-18 16-19 17-20 11-21 19-22 20-23 21-24 21-25 22-26 +0-0 0-1 2-2 3-3 5-7 6-8 7-9 9-10 9-11 11-12 19-13 15-16 18-17 10-18 16-20 21-22 22-23 23-24 19-25 24-26 22-27 25-28 25-29 25-30 28-31 29-32 15-33 27-34 31-35 31-36 32-37 34-38 30-39 32-40 36-41 40-42 38-43 32-44 38-45 40-46 42-48 35-49 45-50 44-51 45-52 45-53 46-54 55-56 46-57 48-58 51-60 55-61 51-62 55-63 47-68 54-69 53-70 57-71 +0-0 1-1 2-2 7-3 7-4 3-5 4-6 6-7 4-8 6-9 8-10 8-11 8-12 10-13 12-14 18-15 13-16 16-17 17-18 10-19 14-20 25-21 18-22 21-23 19-24 19-25 21-26 21-27 24-28 22-30 23-31 25-32 25-33 +2-1 3-2 1-3 0-4 4-5 4-6 7-7 13-8 7-9 6-10 8-11 7-12 13-13 11-14 11-15 12-16 14-17 15-18 13-19 17-20 18-21 18-22 8-23 20-24 19-25 26-27 22-29 25-30 29-32 26-33 25-34 27-35 32-36 32-37 29-38 29-39 51-40 29-41 35-42 30-45 34-46 35-47 35-48 37-49 37-50 42-51 39-53 47-55 44-56 47-57 46-58 47-59 49-60 50-61 51-62 49-63 51-64 52-66 46-67 54-68 57-69 58-70 50-71 59-72 +0-0 2-1 1-2 3-3 3-4 4-5 4-6 9-7 7-8 8-9 9-10 10-11 10-12 11-13 11-14 12-15 13-16 15-17 16-18 14-19 7-22 18-23 19-24 21-25 21-26 +0-0 1-1 2-2 4-3 4-4 5-5 7-6 8-7 +0-0 0-1 1-2 2-3 3-4 3-5 6-6 7-7 6-8 6-9 7-10 8-11 +0-0 2-1 1-2 3-3 4-4 3-5 4-8 5-9 +0-0 1-1 3-2 4-3 4-4 4-5 7-6 7-7 8-8 8-9 10-10 +0-0 1-1 2-2 4-3 5-4 3-5 6-6 5-7 7-8 8-9 8-10 8-11 10-12 +0-0 0-1 2-2 3-3 4-4 6-5 5-6 7-7 8-8 11-10 9-11 26-12 13-13 13-14 13-15 17-16 16-18 17-19 18-20 19-21 20-22 20-23 25-25 27-26 26-27 29-28 28-29 9-30 26-31 30-32 +0-0 1-1 3-2 4-3 3-4 5-5 6-6 7-7 8-8 9-9 10-10 +0-0 0-1 1-2 2-3 4-4 3-5 5-6 5-7 4-8 5-9 6-10 7-11 7-12 9-13 9-14 10-15 +1-0 7-1 2-3 7-4 4-5 2-6 7-7 7-8 14-9 11-10 7-11 9-12 10-13 11-14 12-15 9-16 10-17 17-19 17-21 19-22 31-23 19-24 21-25 34-26 14-27 24-28 31-30 24-31 24-32 27-33 29-34 29-35 24-37 32-38 33-39 34-40 25-42 36-43 37-44 37-47 39-48 41-49 45-51 41-52 45-53 45-54 46-55 44-56 39-57 48-58 52-59 49-60 50-61 50-62 51-63 54-65 56-66 55-67 57-68 +0-0 0-1 1-2 2-3 4-4 4-5 5-6 14-7 6-8 7-9 7-10 9-11 9-12 11-13 13-14 14-15 11-16 13-17 14-18 14-19 10-20 15-21 16-22 +0-0 5-1 3-2 4-3 4-4 8-5 5-6 8-7 9-8 +0-0 1-1 0-3 2-4 3-6 5-7 6-8 10-9 6-10 8-11 8-12 9-13 11-14 10-15 12-16 9-17 13-18 12-19 13-20 14-21 14-22 14-23 17-24 16-25 18-26 19-27 20-28 20-29 22-30 20-31 23-32 +0-0 1-1 3-3 3-4 5-5 6-6 6-7 7-8 8-9 9-10 10-11 12-12 12-13 14-14 15-15 16-16 17-17 18-18 19-19 20-20 21-21 21-22 23-23 23-24 24-25 25-26 26-27 28-28 22-29 29-30 +0-0 2-1 3-2 4-3 5-4 6-5 7-6 +0-0 18-1 2-2 5-3 6-4 7-5 5-6 9-7 9-8 9-9 11-10 12-11 16-13 14-14 16-16 17-17 16-18 20-19 21-20 17-21 26-22 22-23 24-24 26-25 30-26 24-27 28-28 29-29 28-30 28-31 34-33 35-34 36-35 +1-0 1-1 2-2 3-3 5-4 4-5 6-6 7-7 8-8 7-9 10-10 11-11 12-12 13-13 14-14 +0-0 0-1 2-2 3-3 1-4 4-5 5-6 7-7 8-8 9-9 5-10 11-11 11-12 11-13 10-14 13-15 14-16 16-17 15-18 17-19 16-20 18-21 +0-0 2-1 1-2 4-3 4-4 6-5 5-6 7-7 10-8 9-9 10-10 12-11 15-12 13-13 15-14 16-15 13-16 18-17 19-18 +0-0 1-1 2-2 3-3 5-4 6-5 6-6 8-7 10-8 10-9 12-10 +0-0 1-1 1-3 
2-4 4-5 5-6 6-8 9-9 6-10 10-11 11-12 16-13 14-14 15-15 17-17 18-18 14-19 20-20 16-21 21-22 24-23 17-24 23-27 27-28 28-30 26-31 27-32 24-33 37-34 29-35 33-36 34-37 31-38 29-39 39-40 35-41 37-42 42-43 39-45 41-46 44-47 43-48 44-49 46-50 43-51 48-52 43-53 49-54 49-55 +0-0 1-1 2-2 2-3 3-4 6-5 4-6 4-7 12-8 13-9 9-10 10-11 17-12 18-13 18-14 16-15 23-16 18-17 23-18 18-19 27-20 25-21 29-22 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 8-9 10-10 11-11 12-12 13-13 +0-0 2-1 0-3 4-4 4-5 5-6 6-7 6-8 7-9 8-10 9-11 12-12 6-13 12-14 14-15 11-16 15-17 10-18 16-19 18-21 18-22 19-23 20-24 21-25 22-26 +12-0 1-1 11-2 2-3 14-4 4-5 4-6 5-7 6-8 6-9 8-10 9-11 10-12 2-13 16-14 11-15 11-16 14-17 15-18 17-20 19-21 20-22 21-23 22-24 23-25 12-26 24-27 23-28 21-29 26-30 29-34 29-35 34-36 31-37 31-38 34-39 38-40 35-42 36-43 37-44 39-45 40-46 41-47 43-49 40-50 45-51 46-52 +0-0 1-1 2-2 3-3 5-4 5-5 6-6 8-7 7-8 18-9 9-11 10-12 11-13 12-14 13-15 14-16 15-17 16-18 17-19 18-20 19-21 20-22 21-23 +0-0 1-1 2-2 4-4 6-5 3-6 4-7 10-9 10-11 14-12 15-13 16-14 14-15 17-16 18-17 22-19 18-20 19-21 20-22 23-23 23-24 25-26 26-27 27-28 27-29 31-31 32-32 33-33 33-34 30-35 34-36 +0-0 1-1 4-2 7-3 2-4 3-5 4-6 5-7 8-8 10-9 10-10 11-11 13-13 14-14 15-15 23-16 19-17 18-18 22-19 21-20 23-21 24-22 +0-0 1-1 6-3 5-4 5-5 9-6 10-7 11-8 11-9 12-10 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 9-10 12-11 12-12 13-13 14-14 15-15 15-16 16-17 +0-0 1-1 3-2 4-3 7-4 8-5 8-6 9-7 +4-0 2-1 6-2 5-3 5-4 6-5 11-6 6-7 10-8 10-9 6-10 13-11 11-12 15-13 15-14 22-15 19-16 18-17 22-18 23-19 +0-0 5-1 2-2 2-3 4-4 5-5 3-6 5-7 6-8 7-9 13-10 8-11 10-12 16-13 13-15 13-16 12-17 15-19 16-20 19-21 16-22 21-23 20-24 22-26 23-27 24-28 26-29 27-30 26-31 30-32 28-33 30-34 30-35 31-36 32-37 +0-0 4-1 3-3 5-4 8-5 9-6 16-7 1-8 2-9 8-11 11-12 11-13 9-14 10-15 13-16 5-18 20-20 16-21 11-22 18-23 19-24 20-25 25-26 25-27 25-28 25-29 24-30 24-31 27-32 28-33 27-34 31-35 32-36 34-37 34-38 35-39 34-40 37-41 30-42 39-43 36-44 41-45 41-46 42-47 43-48 44-50 +0-0 1-1 5-2 2-3 3-4 4-5 5-6 6-7 6-8 7-9 10-10 13-11 12-12 9-13 16-14 17-15 15-16 21-17 18-18 26-19 23-20 19-21 13-22 22-23 20-24 24-25 30-26 30-27 31-28 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 8-7 6-8 7-10 8-11 11-12 10-13 13-14 12-15 8-16 15-17 16-18 17-19 14-20 18-21 18-22 19-23 20-24 23-25 22-26 23-27 23-28 24-29 +0-0 1-1 2-2 3-3 2-4 4-5 4-6 5-7 6-8 9-9 11-10 8-11 8-12 9-13 10-14 11-15 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 5-7 7-8 8-9 9-10 11-11 15-12 18-13 18-14 13-15 20-16 12-17 16-18 11-19 21-21 17-22 23-23 25-25 21-26 29-27 34-28 24-29 26-30 29-31 26-32 30-33 30-34 34-35 34-36 30-37 32-38 35-39 +0-0 2-1 6-2 4-3 3-4 4-5 4-6 7-7 6-9 9-10 9-11 10-13 11-14 17-15 27-16 14-17 14-18 15-19 17-21 23-22 20-23 23-24 23-25 24-26 25-27 25-28 27-29 27-30 31-31 31-32 31-33 34-34 34-35 32-36 33-37 39-38 39-39 38-40 38-41 35-42 37-43 41-44 +0-0 1-1 1-2 2-3 2-4 5-5 6-6 7-7 7-8 4-9 9-10 9-11 10-12 15-13 14-14 14-15 16-16 17-17 26-18 17-19 21-20 18-21 19-22 23-23 26-24 28-25 28-26 24-27 29-28 +0-0 1-1 2-2 4-3 5-4 2-5 6-6 6-7 7-8 8-9 19-10 10-11 8-12 10-13 24-14 14-15 14-16 16-17 20-20 19-21 20-22 18-23 22-24 23-25 24-26 25-27 +0-0 1-1 2-2 2-3 3-4 5-6 5-7 7-8 7-9 7-10 9-11 8-12 10-13 11-14 13-15 14-16 15-17 15-18 16-19 +0-0 1-1 1-2 4-3 5-4 6-5 9-6 9-7 9-8 10-9 8-10 13-11 15-13 16-14 13-15 18-16 19-17 20-18 21-19 17-20 19-21 18-22 19-23 21-24 25-25 26-26 21-27 21-28 27-29 33-30 27-32 30-33 29-34 27-35 32-37 31-38 34-39 33-40 35-41 +0-0 1-1 2-2 2-3 5-4 4-5 6-6 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 9-9 15-11 11-12 16-14 18-15 17-16 18-17 18-18 20-19 19-20 24-21 22-22 23-23 25-24 +0-0 1-1 2-2 1-3 5-4 
6-5 6-6 6-7 8-8 11-9 11-10 12-11 13-12 14-13 15-14 18-15 16-16 18-17 +0-0 1-1 2-2 3-3 4-4 4-5 5-6 5-7 6-8 7-9 8-10 9-11 12-12 9-13 10-14 12-15 12-16 10-17 14-18 15-19 16-20 17-21 +0-0 1-1 2-2 3-3 5-4 4-5 4-6 5-7 6-8 7-9 7-10 9-11 10-12 10-13 12-14 13-15 14-16 13-17 14-18 15-19 +0-0 1-1 1-2 2-3 3-4 4-5 4-6 5-7 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 8-7 10-8 9-9 9-10 10-11 13-12 14-13 15-14 12-15 12-16 17-17 16-18 23-19 19-20 22-21 21-22 22-23 24-24 +0-0 1-1 2-2 2-3 6-4 4-5 6-6 6-7 8-8 8-9 10-10 18-13 24-14 14-15 12-16 13-17 17-18 18-19 17-20 19-21 20-22 23-23 24-24 25-25 26-26 15-28 27-29 30-30 28-31 29-32 31-33 +0-0 1-1 1-2 4-3 3-4 4-5 6-6 6-7 7-8 8-9 8-10 9-11 10-12 10-13 12-14 +0-0 0-1 2-2 3-3 12-4 6-5 8-6 9-7 10-8 11-9 12-10 13-11 15-12 16-13 17-14 +0-0 0-1 3-2 4-3 2-4 5-5 4-7 5-8 5-9 7-10 9-11 9-12 10-13 10-14 10-15 11-16 +0-0 1-1 7-2 5-4 6-5 0-6 5-7 5-9 6-10 9-11 10-12 9-13 8-14 14-15 14-16 13-17 17-18 15-19 17-20 23-21 16-22 21-24 24-26 25-27 22-28 24-29 25-30 28-31 29-32 31-33 30-34 29-35 31-36 31-37 32-38 33-39 36-40 38-41 38-42 39-43 39-44 +0-0 0-1 2-2 5-3 4-4 6-5 7-6 9-7 7-8 9-9 10-10 11-11 11-12 13-13 12-14 15-15 14-16 16-17 +0-0 1-1 1-2 3-3 4-4 5-5 6-6 4-7 5-8 9-9 8-10 10-11 5-12 13-13 9-14 8-15 12-16 11-19 12-20 13-21 13-23 18-24 18-25 16-26 18-27 12-28 17-29 19-30 19-31 20-32 21-33 +1-0 2-1 3-2 5-3 6-4 +1-0 2-1 3-2 4-3 10-4 4-5 5-6 6-7 7-8 9-9 4-10 13-11 9-12 9-13 13-14 10-15 11-16 13-17 14-18 15-19 16-20 17-21 18-22 16-23 20-24 19-25 20-26 21-27 22-28 23-29 24-30 24-31 27-32 +0-0 5-1 8-2 12-3 3-4 13-5 8-6 10-9 1-10 1-11 19-13 20-14 15-15 18-17 22-18 23-19 24-20 21-22 31-23 37-25 28-26 28-27 38-28 39-29 39-30 35-32 39-33 41-34 38-35 42-36 42-37 46-38 52-39 50-40 25-41 75-43 51-44 51-45 53-46 54-47 49-48 53-50 60-51 59-52 62-53 69-54 62-55 61-56 45-57 68-58 69-59 69-60 69-62 70-63 69-64 75-65 +0-0 1-1 3-2 2-4 14-5 5-6 6-7 10-8 7-9 12-10 14-12 11-13 17-14 18-15 21-16 24-19 24-20 25-21 24-22 29-23 29-25 34-26 37-27 44-28 33-29 31-30 34-31 39-32 38-33 42-34 43-36 41-37 46-40 52-42 49-43 53-44 55-45 53-46 56-47 52-48 56-49 58-50 62-51 62-52 55-53 67-55 67-56 68-57 67-58 67-59 71-60 +0-0 8-1 8-2 16-3 15-4 5-5 5-6 5-7 9-8 9-9 9-10 14-12 14-13 17-14 14-15 14-16 20-17 14-18 3-19 17-20 28-22 16-23 22-25 28-27 31-30 31-31 34-32 29-33 34-34 34-35 31-36 49-37 35-38 36-39 36-40 43-41 40-42 42-43 49-45 44-46 47-47 49-48 50-50 50-51 51-52 52-53 53-54 54-55 55-56 56-57 +0-0 15-1 1-2 2-3 9-4 10-5 6-6 2-7 15-9 20-10 16-11 17-12 19-13 21-14 0-17 26-18 25-19 25-20 32-21 29-22 32-23 32-24 36-25 38-26 41-27 45-28 37-29 14-30 41-31 33-32 48-33 49-35 50-36 49-37 52-38 56-39 66-41 66-42 54-43 66-45 58-46 66-48 64-49 65-50 66-51 71-52 74-54 72-55 73-56 75-57 75-59 86-61 79-62 84-63 83-65 82-66 85-67 86-69 93-70 94-72 100-74 107-75 101-76 98-77 111-78 117-79 124-82 104-84 109-86 123-87 106-88 107-89 116-90 119-91 121-92 124-93 +0-0 1-1 +1-0 32-1 1-2 2-3 3-4 4-5 5-6 17-8 10-9 15-10 15-11 18-14 18-15 13-16 14-17 16-18 26-19 25-20 19-21 23-22 26-23 22-24 29-25 31-26 31-27 29-28 29-29 40-30 37-31 40-32 45-33 40-34 44-35 43-37 43-38 43-39 43-40 32-41 49-42 +0-0 1-1 2-2 10-4 12-5 6-6 7-7 8-8 9-9 11-10 12-11 9-12 16-13 17-14 18-15 14-16 15-17 13-18 21-20 20-21 16-22 21-23 27-24 16-25 27-26 26-27 26-28 26-29 36-30 29-31 38-32 29-34 45-35 37-36 31-37 49-38 41-39 43-40 40-41 43-42 39-43 42-44 51-45 54-46 39-47 42-48 42-49 57-50 52-51 54-52 58-53 58-54 59-55 58-56 54-57 67-58 51-59 61-61 67-62 63-64 67-65 67-66 64-67 65-68 51-69 59-70 69-71 67-72 66-73 70-74 72-75 72-76 71-77 73-78 +0-0 1-1 +0-0 1-1 1-2 3-3 3-4 3-5 3-6 7-7 
8-8 8-9 11-10 12-11 13-12 14-13 14-14 15-15 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 7-7 9-8 11-9 10-10 10-11 13-12 16-13 14-14 15-15 23-16 16-17 20-18 18-19 19-20 23-21 6-22 33-23 20-24 33-25 57-28 36-30 41-32 41-33 34-34 42-36 38-37 57-42 51-43 45-44 50-45 79-46 55-47 56-48 62-50 59-51 64-52 62-53 51-54 67-55 67-56 68-57 72-58 75-59 76-61 75-62 75-63 77-64 79-65 +0-0 +0-0 3-1 4-2 3-3 5-4 5-5 8-7 9-8 8-9 8-10 15-11 10-12 11-13 18-15 14-16 21-17 22-21 26-22 26-23 27-24 28-25 32-26 32-27 33-28 37-29 33-30 37-32 36-33 32-34 41-35 41-36 34-37 45-38 45-39 46-40 +3-0 4-1 5-2 6-3 2-4 3-5 15-6 22-7 7-8 5-10 23-11 12-12 17-13 27-15 17-16 20-17 20-18 36-19 33-20 36-21 35-22 35-23 24-24 29-25 37-26 31-27 39-28 36-29 32-30 37-31 34-32 35-33 39-34 34-35 40-36 42-37 43-38 43-39 43-41 +0-0 2-1 1-2 4-3 4-4 5-5 5-6 8-7 9-8 10-9 11-10 12-11 11-12 22-13 15-14 14-15 17-17 18-18 19-19 19-20 20-21 23-22 23-23 24-24 25-25 22-26 26-27 +0-0 1-1 2-2 +5-0 3-1 3-2 4-3 5-4 8-5 4-6 10-7 10-8 13-10 13-11 14-12 4-13 13-14 20-15 18-16 22-17 18-18 18-19 18-20 27-21 29-23 20-24 26-26 34-27 34-28 33-29 35-30 33-31 24-32 33-33 43-34 37-35 38-36 37-37 40-38 41-39 41-40 46-41 47-42 48-43 50-44 41-45 52-46 57-47 54-48 55-49 55-50 56-51 57-52 54-53 57-54 59-55 59-56 55-57 62-58 63-59 55-60 64-61 61-62 67-63 +5-1 5-2 4-3 5-4 6-5 5-6 5-7 6-8 10-9 8-10 9-12 14-13 15-14 20-15 15-16 17-17 17-19 19-20 19-21 20-22 22-23 22-24 23-25 24-26 26-27 23-28 25-29 24-30 30-31 27-32 27-33 28-34 28-35 31-36 +0-0 0-1 +1-0 3-1 4-2 1-3 7-4 8-5 11-6 12-7 14-9 21-10 14-11 11-12 25-13 20-14 24-15 19-16 19-17 19-18 26-19 27-20 28-21 42-22 32-23 23-24 46-25 34-26 32-27 34-28 32-29 38-30 40-31 37-32 30-33 30-34 45-37 45-38 54-39 47-40 41-41 55-42 56-43 58-44 48-45 51-46 59-47 60-48 60-49 62-51 63-52 64-53 66-54 66-55 66-56 67-57 72-59 74-60 71-62 71-63 78-65 78-66 +0-0 +0-0 2-1 2-2 4-3 6-4 8-5 7-6 5-7 12-8 8-9 9-10 16-11 16-12 19-13 20-14 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 7-7 7-8 10-9 8-10 7-11 10-12 10-13 9-14 9-16 9-17 11-18 15-19 21-20 21-21 14-22 14-23 15-24 16-26 17-27 23-28 24-29 23-30 26-31 26-32 26-33 28-35 29-36 30-37 32-38 32-39 30-40 35-41 35-43 +0-0 1-1 6-2 6-3 7-4 11-5 6-6 8-7 9-8 12-9 16-11 33-14 19-15 22-17 29-18 21-19 27-20 30-21 27-22 28-23 24-24 24-25 24-26 46-27 31-28 33-29 38-30 38-31 43-33 43-36 45-37 46-38 59-39 49-40 54-42 50-44 60-45 62-49 60-51 69-54 71-55 71-56 70-57 72-58 +0-0 1-1 +1-0 0-1 2-2 3-3 3-4 5-5 7-6 6-7 14-8 12-9 12-11 13-12 16-13 13-14 13-15 18-16 19-17 22-18 23-19 +0-0 1-1 4-2 4-3 5-4 6-5 6-6 9-7 10-8 11-9 11-10 16-11 22-14 16-15 18-16 19-17 20-18 22-19 24-21 25-22 25-23 27-24 30-25 29-26 43-27 33-28 36-29 30-30 33-31 37-34 38-35 39-36 43-39 46-41 48-43 49-44 54-45 54-46 70-47 60-49 59-50 57-51 58-52 60-53 63-54 64-55 65-56 66-57 67-58 66-59 69-60 68-61 70-62 +0-0 0-1 2-2 +0-0 2-1 2-2 3-3 4-4 5-5 7-6 7-7 10-8 10-9 11-10 12-11 13-12 11-13 14-14 15-15 17-16 18-17 21-18 23-20 20-21 23-22 27-23 27-24 28-25 28-26 30-27 +0-0 1-1 1-2 3-3 3-4 2-5 5-6 5-7 8-9 12-11 13-12 16-13 23-14 14-15 22-16 15-17 19-18 18-19 22-20 17-21 25-22 25-23 29-24 28-25 27-26 28-27 31-28 32-29 38-30 18-31 34-32 34-33 35-34 36-35 37-36 40-37 41-38 43-39 41-40 48-42 38-43 48-44 51-45 50-46 53-47 52-48 54-49 +0-0 0-1 0-2 1-3 2-4 +0-0 1-1 5-2 0-3 2-4 3-5 4-7 5-9 7-10 7-11 9-12 10-13 11-14 10-15 14-16 14-17 14-18 13-19 17-20 15-21 12-22 17-23 17-24 18-25 +0-0 1-1 1-2 3-3 2-4 4-5 5-6 5-7 7-8 7-9 14-10 16-12 11-14 25-15 8-16 21-17 21-19 22-20 24-21 18-23 27-24 27-25 28-26 29-27 30-28 33-29 32-30 35-31 32-32 32-33 37-34 36-35 39-36 38-37 40-38 +0-0 0-1 2-2 2-3 6-4 4-5 8-6 
8-7 9-8 13-9 15-10 18-11 18-12 18-13 +0-0 1-1 1-2 4-4 9-6 8-7 20-8 19-9 7-10 8-11 9-12 10-13 11-14 15-16 17-18 18-19 20-20 19-21 20-22 21-23 22-24 22-25 27-28 22-29 30-30 31-31 32-32 33-33 32-34 34-35 34-36 35-37 36-38 37-40 38-41 39-42 36-43 41-44 40-45 43-46 42-47 44-48 +1-0 1-1 4-2 5-3 7-4 +0-0 1-1 1-2 3-3 4-4 8-5 8-6 10-7 15-8 12-9 7-10 16-11 18-12 19-13 20-14 21-15 22-16 23-17 25-18 25-19 26-20 +0-0 2-1 6-2 6-3 4-4 5-5 8-6 6-7 9-8 10-9 11-10 12-11 13-12 13-13 15-14 17-15 16-16 16-17 22-18 19-19 17-20 20-22 23-23 25-25 29-26 32-27 27-28 27-29 27-30 28-31 31-32 30-33 33-35 +0-0 2-1 5-2 5-3 5-4 7-5 7-6 9-7 9-8 11-9 13-10 12-11 13-12 13-13 18-14 16-15 16-16 35-17 13-19 29-20 27-21 22-22 25-23 31-24 27-25 33-26 35-27 37-28 36-29 39-30 40-31 42-32 44-33 44-34 46-35 47-36 43-37 47-38 51-39 51-40 53-41 54-42 53-43 47-44 55-45 +0-0 2-1 10-3 4-4 6-5 7-6 9-7 10-8 10-9 11-10 14-11 15-12 14-13 17-14 14-15 18-16 17-17 21-18 23-19 24-20 22-21 25-22 29-23 30-24 28-25 25-26 32-27 33-28 33-29 34-30 34-31 37-32 37-33 39-34 40-35 41-36 43-37 36-38 44-39 +0-0 3-1 5-2 6-3 7-4 10-5 10-6 15-7 12-8 15-9 13-10 17-11 16-12 26-13 24-14 26-15 25-16 30-17 25-18 33-19 32-20 36-21 +1-0 4-1 6-2 +0-0 5-1 4-2 7-3 3-4 11-5 7-6 13-7 10-8 10-9 16-10 12-11 15-13 18-14 19-15 18-16 18-17 22-18 30-19 22-20 7-21 21-22 26-23 26-24 23-25 25-26 31-27 31-28 31-29 32-30 36-31 37-32 38-33 39-34 40-35 41-36 42-37 +0-0 2-2 11-3 3-4 4-5 4-6 5-7 7-8 7-9 7-10 7-11 8-12 11-13 13-14 14-15 15-16 17-17 16-18 16-19 10-20 18-21 21-23 25-24 20-25 23-26 27-27 23-28 30-30 44-31 31-32 32-33 28-34 32-35 43-36 44-37 38-38 39-39 40-40 45-41 42-43 44-44 45-46 +0-0 2-2 3-3 4-4 6-5 7-6 8-7 9-8 11-9 12-10 13-11 14-12 15-13 16-14 17-15 25-17 26-18 22-19 21-20 20-21 27-22 29-24 29-25 36-26 30-27 34-28 34-29 33-30 37-31 40-32 43-33 40-34 41-35 44-36 44-37 42-38 43-39 52-40 46-41 50-42 48-43 46-44 52-46 55-47 +0-0 7-1 8-2 2-3 16-4 4-5 3-6 9-7 11-8 12-9 13-10 11-11 3-12 15-13 12-14 19-15 17-16 16-17 20-18 18-19 22-20 22-21 19-22 9-23 21-24 19-25 11-26 28-27 20-28 20-29 41-30 27-31 33-32 34-33 17-36 36-37 35-38 38-39 37-40 62-41 36-42 43-44 44-45 40-46 39-47 44-48 39-50 47-51 46-52 26-53 49-54 50-55 47-56 48-57 52-58 51-59 52-60 53-61 53-63 55-64 56-65 57-66 58-67 59-68 60-69 61-70 62-71 63-72 +0-0 5-1 5-3 2-4 5-5 9-6 7-7 9-8 14-9 14-10 14-11 14-12 15-13 23-14 17-16 20-17 22-18 26-19 27-20 28-21 30-22 25-23 31-24 35-26 50-27 35-28 41-29 40-30 32-31 47-32 48-33 37-35 63-36 52-38 56-39 51-40 52-41 59-42 55-43 60-44 62-46 63-48 +0-0 1-1 1-2 4-3 4-4 5-5 6-6 6-7 6-8 9-11 9-12 16-13 11-14 15-15 13-16 15-18 21-19 22-20 22-23 22-24 26-26 29-27 27-28 28-29 31-30 35-31 35-32 38-33 34-34 34-35 38-36 38-37 39-38 45-39 41-41 36-42 38-43 41-44 41-45 33-46 34-47 49-49 48-50 51-51 50-52 52-54 +0-0 0-1 6-2 7-3 8-4 9-5 6-6 15-7 9-8 9-9 17-11 19-12 18-13 20-14 20-15 17-17 30-18 31-19 32-20 28-21 34-22 34-23 34-24 23-25 37-26 42-27 38-28 43-29 41-30 32-31 40-32 49-33 43-35 54-36 53-37 56-38 55-39 57-40 +0-0 1-1 7-2 8-3 2-4 2-5 4-6 9-8 11-9 12-10 13-11 11-12 15-14 16-16 17-17 16-18 20-19 18-20 22-21 22-22 23-23 24-24 +0-0 1-1 2-2 2-3 4-4 6-6 6-7 4-9 5-10 10-11 9-12 +0-0 1-1 2-2 3-3 5-4 5-5 5-6 7-7 9-8 15-9 19-10 20-11 17-12 16-13 15-14 16-15 12-16 +0-0 1-1 1-2 5-3 5-4 22-5 7-6 7-7 10-8 10-9 11-10 13-11 14-12 12-13 12-14 18-15 12-16 19-17 22-20 14-21 19-22 29-23 22-24 30-25 32-26 28-27 30-28 28-29 28-30 32-31 31-32 30-33 35-34 33-35 34-36 34-37 36-38 38-39 34-40 39-41 40-42 36-43 38-44 42-45 43-46 40-47 45-49 48-51 +0-0 1-1 4-2 3-3 4-4 5-5 6-6 5-7 9-8 8-9 11-10 8-11 12-12 12-13 
[… added word-alignment data file: one sentence pair per line, each line a space-separated list of src-tgt token-index pairs in Pharaoh format, e.g. "0-0 1-1 2-2 3-3"; several hundred added alignment lines whose per-line "+" diff structure was lost in extraction are elided here …]
32-29 34-30 34-31 35-32 35-33 36-34 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 5-7 9-8 10-9 10-11 10-12 11-13 13-14 33-15 15-16 20-17 11-19 18-20 19-21 20-22 27-23 28-24 20-25 24-27 25-28 30-29 35-30 33-32 36-33 17-34 36-36 36-37 39-38 39-40 41-41 44-42 45-44 45-45 46-46 47-47 48-48 50-49 51-50 50-51 55-52 56-53 54-54 61-55 63-57 55-59 67-60 67-65 69-66 71-67 70-68 67-69 71-70 52-71 72-72 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 6-7 10-8 9-9 10-10 6-12 11-13 12-14 12-16 14-17 15-18 22-19 18-21 20-22 20-23 19-24 21-25 23-27 24-28 25-29 26-30 27-31 29-32 31-33 32-34 32-35 36-36 35-37 35-38 39-39 39-40 37-41 35-42 29-44 41-45 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 5-8 12-9 10-10 12-11 14-12 17-14 18-15 19-16 20-17 23-18 22-19 24-20 25-21 26-22 27-23 28-24 +0-0 3-1 5-2 3-3 2-4 10-5 11-6 12-7 7-9 13-10 14-11 15-12 15-13 15-14 18-15 20-17 21-18 28-19 23-20 24-21 28-22 29-23 29-24 31-25 32-26 +0-0 1-1 3-3 3-4 5-5 10-6 10-8 8-9 11-10 12-11 12-12 13-13 14-14 15-15 16-16 17-17 17-18 19-19 22-20 22-21 23-22 24-23 25-24 25-25 26-26 32-27 22-28 29-29 32-30 20-31 51-32 35-33 33-34 34-35 42-36 25-37 35-38 37-39 43-40 44-41 44-42 45-43 44-44 50-45 50-46 36-47 51-48 +0-0 1-1 2-2 3-3 4-4 6-5 11-6 11-7 11-8 11-9 11-10 15-12 15-13 5-14 5-15 15-16 30-17 24-18 21-19 26-20 24-22 28-24 30-26 34-27 36-29 36-31 33-32 34-33 36-34 38-35 38-36 41-37 40-38 41-39 42-40 41-41 44-42 45-43 43-44 46-45 48-46 52-47 53-48 54-50 55-51 50-52 60-53 61-54 60-55 63-56 65-57 62-58 64-60 +0-0 3-1 9-2 2-3 4-4 5-5 5-6 7-7 8-8 7-9 8-10 9-11 10-12 9-13 11-14 14-15 15-16 16-17 14-18 18-19 19-20 17-21 18-22 18-23 22-24 23-25 24-26 25-27 20-28 20-29 27-30 26-31 27-32 29-34 33-35 34-36 34-37 32-38 37-39 38-41 38-42 39-43 40-44 40-45 37-46 39-47 44-48 37-49 41-50 42-51 43-52 45-53 45-54 46-55 47-56 43-57 48-58 +0-0 1-1 2-2 4-3 3-4 7-5 5-6 6-7 9-8 10-9 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 +0-0 1-1 2-2 3-3 6-4 5-5 6-6 7-7 9-8 10-9 11-10 12-11 13-12 16-13 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 10-9 10-10 12-11 12-12 14-13 13-14 +7-0 0-1 1-2 5-4 5-5 3-6 2-7 4-8 4-9 10-10 7-11 17-12 10-13 11-14 12-15 13-16 7-17 8-18 15-19 14-20 36-21 19-23 19-24 25-26 25-30 25-31 27-32 27-33 27-34 29-35 29-36 29-37 32-38 28-39 36-40 33-41 34-42 36-43 36-44 37-46 38-47 39-48 40-49 41-50 43-51 42-52 51-53 44-54 45-55 46-56 47-57 47-58 48-59 47-60 51-61 52-62 50-63 52-64 53-65 54-66 54-67 47-68 58-69 58-70 57-71 +0-0 1-1 4-2 3-3 4-4 4-5 8-6 7-8 9-9 10-10 16-11 14-13 14-16 14-17 16-18 15-19 18-20 19-21 20-22 21-23 16-24 21-25 21-26 21-27 25-28 35-29 28-31 28-32 28-33 28-34 29-37 36-38 35-39 35-40 38-41 38-42 29-43 39-44 40-46 44-47 44-48 42-51 44-52 45-53 51-54 49-55 51-57 44-58 51-60 56-61 68-62 53-63 57-64 58-65 59-66 60-67 62-68 62-69 55-70 63-72 63-73 67-74 67-75 68-76 69-77 72-78 71-79 +0-0 2-1 2-2 6-4 5-5 6-6 10-7 5-8 10-9 5-10 5-11 45-12 13-13 14-14 16-15 14-16 15-17 15-18 17-19 19-21 20-23 24-24 21-25 24-26 29-27 32-28 29-29 26-30 27-31 29-32 33-33 32-34 29-35 31-36 33-37 38-38 34-39 35-40 40-41 41-42 53-43 39-45 47-46 53-47 45-48 46-49 47-51 51-52 48-53 47-54 49-55 51-56 60-57 54-58 60-59 56-60 60-61 63-62 59-63 60-64 61-65 64-66 65-67 66-68 67-69 69-70 70-72 70-73 74-74 70-75 67-77 76-78 +2-0 1-1 1-2 3-3 4-4 4-5 5-6 6-7 7-8 9-9 8-10 9-11 12-12 13-13 14-14 +0-0 2-1 1-3 4-4 4-5 5-6 7-7 8-8 9-9 14-10 11-11 12-12 13-13 15-14 16-15 14-16 18-17 19-18 20-19 21-20 21-21 24-22 23-23 +1-1 4-2 5-3 5-4 19-5 8-7 8-8 12-9 9-10 11-11 12-12 12-13 13-14 21-15 16-16 17-17 18-18 20-19 22-20 23-21 31-22 20-23 42-24 26-25 29-28 31-29 32-30 40-31 38-32 44-33 42-34 44-35 +0-0 1-1 3-2 17-3 3-4 8-5 9-6 
10-7 13-8 12-9 16-10 11-11 12-12 14-13 15-14 15-15 17-16 24-17 23-18 25-19 21-20 25-21 29-23 26-24 26-25 31-26 31-27 33-28 34-29 35-30 36-31 37-32 38-33 39-34 42-36 40-37 41-38 +2-0 4-1 6-2 4-3 2-4 1-5 8-6 9-7 10-8 13-9 11-10 13-12 14-14 19-15 20-16 18-17 21-18 18-19 23-21 22-22 24-23 25-24 32-25 30-26 29-27 30-28 33-29 32-30 32-31 33-32 34-33 38-34 42-35 42-36 40-37 38-38 42-39 44-40 44-41 44-42 47-43 48-44 +1-2 3-3 4-4 5-5 6-6 8-7 9-8 11-9 12-10 13-11 14-12 15-13 21-17 21-18 5-19 25-20 30-21 27-22 24-23 5-24 28-25 31-27 33-28 35-29 31-30 35-31 41-34 37-35 51-36 41-37 45-38 42-39 45-41 45-42 46-43 49-46 50-47 52-48 54-49 58-50 58-51 59-53 59-54 64-55 65-56 66-57 69-60 69-61 71-62 73-63 75-64 75-65 74-66 +0-0 0-1 2-2 3-3 4-4 9-5 6-6 7-7 7-8 9-10 10-11 11-12 12-13 12-14 14-15 15-16 16-17 19-18 19-20 18-21 13-22 22-23 23-24 21-25 24-26 24-27 25-28 +0-0 1-1 2-2 3-3 +0-0 0-1 3-2 3-3 3-4 4-5 7-6 6-7 7-8 7-9 8-10 9-11 11-12 12-13 13-14 14-15 15-17 17-19 18-20 10-21 19-22 +0-0 1-1 2-2 5-3 4-4 5-5 7-6 7-7 10-8 12-10 12-11 14-12 12-13 20-14 21-15 16-16 12-17 23-19 24-20 25-21 28-22 26-23 27-25 28-26 29-27 30-28 32-29 31-30 +0-0 2-3 2-4 2-5 2-6 3-7 6-8 5-9 9-10 8-11 16-12 10-13 12-14 16-15 14-17 16-19 15-20 17-21 2-22 25-23 26-24 25-25 24-26 18-27 24-28 20-29 42-30 18-31 24-32 27-34 34-35 31-36 37-39 38-40 36-41 35-42 32-44 36-46 41-47 42-49 43-50 46-52 +0-0 1-1 4-2 4-3 3-4 6-5 7-6 5-7 9-8 10-9 11-10 13-11 15-12 14-13 +0-0 13-1 4-2 5-3 6-4 7-5 13-6 13-7 7-8 13-9 13-10 12-11 14-12 13-13 10-14 15-15 15-16 15-17 22-18 26-19 20-20 17-21 31-22 28-24 28-25 29-26 23-27 23-28 32-29 32-30 25-31 32-32 33-33 36-34 35-35 36-36 37-37 38-38 39-39 32-40 41-41 42-42 36-43 53-44 45-45 47-47 48-48 50-50 51-51 53-52 52-53 47-54 53-55 55-56 56-57 52-58 53-59 58-60 60-61 61-62 62-63 64-64 64-65 63-66 +0-0 0-1 1-2 3-3 2-4 4-5 3-6 6-7 8-8 8-9 5-10 9-11 +0-0 1-1 2-2 3-3 7-4 6-5 8-6 11-7 13-8 13-9 16-10 17-11 14-12 15-13 17-14 18-15 22-16 26-17 29-18 30-19 32-20 28-21 31-22 35-23 32-24 37-25 40-26 32-27 21-28 39-29 36-30 41-31 +0-0 1-1 3-2 4-3 5-5 10-6 12-7 7-8 13-9 14-10 10-12 17-13 19-14 18-15 20-16 22-17 23-18 18-19 26-20 29-21 23-22 29-23 27-24 27-25 27-26 31-27 35-28 33-29 35-30 37-31 38-32 42-34 43-35 41-36 41-37 39-38 44-39 +0-0 3-1 4-2 4-3 8-4 6-5 10-6 8-7 15-8 17-9 16-10 18-11 19-12 +0-0 2-1 3-2 2-3 3-4 6-5 6-6 7-7 4-8 10-9 2-10 12-12 11-13 13-14 14-15 +0-0 4-1 5-2 3-3 3-4 8-5 8-6 6-7 8-8 12-9 11-10 17-11 10-13 10-14 11-15 20-16 15-17 16-18 18-19 19-20 27-21 27-22 27-23 32-24 32-25 36-26 35-27 36-28 36-29 41-30 42-32 +0-0 1-1 3-2 5-3 4-4 6-5 6-6 7-8 12-9 11-10 14-11 14-12 17-13 19-14 20-15 21-16 22-17 21-18 16-19 24-20 25-21 27-22 28-23 29-24 30-25 32-26 31-27 +0-0 0-1 2-2 2-3 2-4 3-5 5-6 4-7 6-8 7-9 7-10 8-11 9-12 10-13 11-14 12-15 14-16 16-17 17-18 17-19 18-20 19-21 18-22 +0-0 1-1 2-2 7-3 4-4 5-5 6-6 9-7 10-8 11-9 12-10 13-11 11-12 15-13 14-14 +0-0 1-1 17-2 4-3 5-4 16-5 7-6 19-7 20-8 15-9 15-10 7-11 15-12 15-13 15-14 14-15 18-16 14-17 27-18 27-19 25-20 30-21 29-24 28-25 34-26 33-27 32-28 27-29 30-30 38-31 39-32 41-33 39-34 44-35 45-36 42-37 43-38 47-39 47-40 43-41 49-42 51-43 51-44 57-45 54-46 55-47 56-48 59-49 59-50 58-51 +0-0 1-1 2-2 5-3 5-4 7-5 9-6 10-7 17-8 14-9 14-10 16-11 20-12 20-13 23-14 26-15 27-16 27-17 26-18 28-19 +0-0 3-1 2-2 2-3 2-4 3-5 6-6 6-7 7-8 8-9 11-10 10-11 12-12 13-13 14-15 14-16 17-17 17-18 17-20 18-21 19-22 20-23 21-24 22-25 23-26 24-27 25-28 27-29 33-30 27-31 27-32 27-33 28-34 29-35 31-36 32-37 33-38 30-39 35-40 +0-0 2-1 3-2 0-3 5-4 6-5 +0-0 2-2 3-3 6-4 6-5 7-6 8-7 9-8 10-9 13-10 13-11 
15-12 15-13 19-14 19-15 19-16 21-17 21-18 22-19 21-20 27-21 27-22 27-23 32-24 33-25 34-27 31-28 37-29 37-30 37-31 39-32 42-34 43-35 +1-1 3-2 3-3 3-4 3-5 6-6 7-7 9-8 10-9 8-11 13-12 12-13 17-14 17-15 17-16 20-17 22-18 22-19 27-23 25-24 28-25 28-26 30-27 30-28 15-29 32-30 34-31 35-32 37-33 39-34 40-35 39-36 45-37 37-38 48-40 46-41 42-43 47-44 54-45 54-46 51-47 51-48 38-49 51-50 56-51 59-52 58-53 60-55 +0-0 1-1 2-2 3-3 5-4 3-5 12-6 7-7 7-8 12-9 6-10 13-11 13-12 15-13 14-14 17-15 17-16 22-17 20-18 21-21 23-22 +0-0 2-1 4-2 5-3 +0-0 0-1 3-2 3-3 5-4 1-5 6-6 4-7 2-8 6-9 8-10 9-11 11-12 12-13 13-14 13-15 14-16 +1-0 5-1 6-2 5-3 9-4 9-5 11-6 13-7 14-8 15-9 +0-0 0-1 2-2 4-3 4-4 4-5 5-6 +0-0 3-1 1-2 4-3 4-4 5-5 7-6 9-7 8-8 13-9 11-11 15-12 +4-0 5-1 6-2 2-3 3-4 3-5 8-6 1-7 2-8 8-9 9-10 9-11 16-12 10-13 12-14 15-15 11-16 17-17 20-18 21-19 22-20 23-21 20-22 30-24 31-25 25-26 25-28 24-29 33-30 36-32 37-33 39-34 17-35 37-36 34-38 34-39 39-40 40-41 +3-1 4-2 1-3 2-5 6-6 6-7 6-8 8-9 9-10 10-11 10-12 13-13 12-14 16-15 14-16 10-17 16-18 17-19 19-20 15-21 22-23 18-24 24-26 26-27 16-28 26-29 27-30 28-31 26-32 29-33 28-34 31-35 32-36 32-37 30-38 34-39 33-40 34-41 37-42 +0-0 4-1 2-2 2-3 7-4 12-5 8-6 11-7 11-8 19-9 20-10 15-11 24-12 22-13 19-14 18-15 22-16 23-17 25-18 29-19 27-20 27-21 +1-0 0-2 3-3 2-4 9-5 7-6 10-7 8-8 13-9 2-10 11-11 13-12 13-13 16-16 25-17 16-18 17-19 15-20 14-21 21-22 23-25 24-26 26-27 29-28 30-29 31-30 +0-0 1-1 2-2 3-3 12-4 3-5 6-6 8-7 7-8 8-9 11-10 11-11 14-12 4-13 16-14 9-15 13-16 18-17 17-18 16-19 22-20 23-21 24-22 22-23 25-24 +0-0 1-1 8-2 9-3 6-4 7-5 10-6 13-7 13-8 15-9 17-10 16-11 20-12 9-13 25-14 25-15 29-16 25-17 25-18 25-19 38-21 43-22 37-23 41-24 29-25 42-26 41-27 44-28 52-29 55-30 47-32 48-33 58-35 +0-0 2-1 3-2 5-3 3-5 4-6 7-7 6-8 8-9 9-10 11-11 15-12 12-13 13-14 12-15 17-16 17-18 15-19 21-21 18-22 20-23 22-24 22-25 16-26 24-27 +0-0 2-1 4-2 5-3 3-5 10-6 7-7 10-9 13-10 11-11 12-12 15-13 11-14 17-15 19-16 17-17 26-18 21-19 27-20 22-21 22-22 33-23 33-24 25-25 28-27 35-28 27-29 30-30 32-31 32-32 34-33 34-34 36-35 +0-0 4-1 1-2 2-3 3-4 10-5 5-6 6-7 7-8 9-9 8-10 8-11 16-12 7-13 13-14 13-15 15-16 16-17 17-18 20-19 11-20 12-21 19-22 21-23 +0-0 1-1 3-2 3-3 4-4 2-5 6-6 7-7 7-8 7-9 8-10 10-12 10-13 11-14 12-15 13-16 15-17 15-18 15-19 13-20 17-21 +0-0 1-1 2-2 3-3 5-4 4-5 4-6 5-7 6-8 7-9 8-10 9-11 10-12 +0-0 1-1 3-2 4-3 6-4 7-5 8-6 9-7 10-8 +0-0 2-1 3-2 4-3 16-4 9-5 9-6 19-7 9-8 9-9 14-10 11-11 12-12 15-13 19-14 20-15 +4-0 4-1 4-2 7-4 7-5 11-6 11-7 15-8 17-9 19-10 20-11 20-12 22-14 23-15 25-16 28-17 29-18 32-19 32-20 33-21 +0-0 1-1 2-2 4-3 4-4 6-5 6-6 8-7 9-8 12-9 13-10 13-11 13-12 17-14 18-15 18-16 19-17 21-18 22-19 +0-0 2-1 3-2 6-3 7-4 +0-0 2-1 12-2 5-4 1-5 5-6 8-7 9-8 7-9 14-10 12-11 19-12 21-13 23-14 24-15 18-16 19-17 21-18 20-19 26-20 22-21 26-22 27-23 27-24 29-25 +0-0 4-1 2-2 3-3 2-4 4-5 6-6 7-7 7-8 5-9 10-10 10-11 11-12 12-13 14-14 18-15 13-16 17-18 18-19 21-20 20-22 19-23 22-24 20-25 20-26 24-27 22-28 23-29 22-30 28-31 30-32 26-33 27-34 31-35 35-36 35-37 38-39 37-40 37-41 37-42 40-43 +0-0 0-1 2-2 2-3 1-4 6-6 1-8 8-9 8-10 8-11 10-12 14-13 15-14 16-15 11-16 12-17 12-18 15-19 13-20 22-21 20-22 21-23 22-24 22-25 22-26 24-27 21-28 37-29 23-30 26-31 23-32 24-33 31-34 25-35 26-36 37-37 33-38 29-39 30-40 31-41 31-42 33-43 37-44 35-45 36-46 37-47 38-48 41-50 +0-0 3-1 4-2 7-3 8-4 8-5 12-6 10-7 11-8 12-9 13-10 14-11 20-12 21-13 16-14 25-15 26-16 +0-0 5-1 6-2 6-3 7-4 8-5 9-6 7-7 12-9 13-11 13-13 13-14 13-16 11-17 17-18 12-19 13-20 15-22 16-23 19-25 34-26 34-27 31-28 34-29 34-30 34-31 38-32 36-33 38-34 
45-35 26-36 41-37 35-38 43-39 37-40 50-41 42-42 43-43 41-44 49-45 46-46 47-47 48-48 49-49 51-50 55-51 54-52 55-53 57-54 58-55 59-56 55-57 61-58 62-59 +0-0 1-1 2-2 2-3 3-4 6-5 5-6 7-7 8-8 10-11 12-12 13-13 +0-0 1-1 1-2 2-3 3-4 4-5 5-6 7-7 6-8 7-9 8-10 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 6-7 6-8 7-9 8-10 6-11 10-12 10-13 11-14 12-15 16-16 14-17 14-18 17-19 18-20 17-21 19-22 +0-0 1-1 1-2 2-3 4-4 5-5 5-6 7-7 6-8 7-9 8-10 7-11 14-12 7-13 18-15 14-17 6-18 18-19 11-20 12-22 14-23 19-24 21-25 19-26 26-27 26-28 20-29 22-30 26-31 26-32 33-33 32-35 37-36 32-37 31-38 33-39 28-40 29-41 32-43 23-44 38-45 +1-1 4-2 4-3 5-4 3-5 9-6 10-7 11-8 12-9 12-10 13-11 18-12 13-13 18-14 28-15 14-16 18-17 20-18 23-19 22-20 27-21 24-22 15-23 29-24 29-25 30-26 32-27 34-28 +1-0 2-1 6-2 2-3 4-4 8-5 4-6 8-7 6-8 10-9 14-10 15-11 17-12 12-13 16-14 16-15 17-16 18-17 21-18 21-19 21-20 +0-0 1-1 2-2 2-3 5-4 5-5 4-6 4-7 12-8 11-9 12-10 14-11 16-12 19-13 20-14 +0-0 2-1 3-2 3-3 5-4 7-5 11-6 10-7 15-8 13-9 11-10 12-12 18-13 17-15 17-16 18-17 22-18 21-19 23-21 24-22 26-23 22-24 25-25 25-26 30-28 32-29 32-30 33-31 34-32 35-33 34-34 38-35 34-36 41-38 42-39 33-40 45-41 45-42 45-44 49-45 48-46 50-47 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 +0-0 1-1 2-2 3-3 4-4 7-5 6-6 9-7 8-8 10-9 11-10 +0-0 2-1 4-2 4-3 10-4 7-5 5-6 6-7 8-8 9-9 11-10 11-11 13-12 13-13 14-14 15-15 16-16 18-17 17-18 18-19 17-20 17-21 18-22 23-23 25-24 25-26 31-27 24-28 20-30 27-31 32-32 30-33 29-34 34-35 35-36 33-37 35-38 36-39 +0-0 1-1 4-2 4-3 11-4 19-5 8-6 5-7 8-8 12-9 16-10 10-11 9-12 12-13 10-14 13-15 15-16 16-17 18-19 19-20 20-21 21-22 29-24 25-25 32-26 31-27 29-28 28-29 34-31 35-32 36-33 37-34 41-35 39-36 39-37 41-38 40-39 43-40 42-41 45-42 44-43 47-44 49-45 49-46 50-47 +0-0 1-1 2-2 4-3 2-4 7-5 6-6 13-7 14-8 13-9 4-10 16-11 17-12 18-13 19-14 18-15 17-16 24-17 25-18 26-19 27-20 23-21 23-22 29-23 31-24 30-25 29-26 34-27 35-28 37-29 33-30 35-32 35-33 39-34 39-35 41-36 42-37 43-38 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 13-8 7-9 15-10 14-11 16-12 17-13 15-14 13-15 19-16 22-17 20-18 14-19 24-20 24-21 13-22 27-23 24-24 23-25 28-26 29-27 27-28 33-29 34-30 34-31 32-32 35-33 33-34 34-35 38-36 37-37 36-38 36-39 39-40 +0-0 1-1 2-2 2-3 3-4 4-5 5-6 +3-0 4-1 2-2 2-3 7-4 5-5 5-6 5-7 1-8 1-9 1-10 13-11 11-12 15-13 12-14 16-15 14-16 18-17 15-18 23-20 18-21 23-22 24-23 25-24 22-25 22-26 24-27 27-28 28-29 25-30 27-31 30-32 31-33 32-34 30-35 34-36 33-37 36-39 36-40 38-41 36-42 33-43 38-44 +0-0 1-1 2-2 3-3 6-4 8-5 9-6 9-7 11-8 13-9 13-10 12-11 15-13 19-14 21-15 24-16 23-17 23-18 25-19 +0-0 1-1 1-2 4-3 5-4 6-5 7-6 8-7 7-8 11-9 12-10 9-11 12-12 1-13 16-15 16-16 17-17 19-18 20-19 18-20 21-21 22-22 22-23 23-24 26-25 29-26 26-27 20-28 30-29 30-30 30-31 32-32 34-33 31-34 35-35 +1-0 1-1 3-2 4-4 10-5 4-6 4-7 4-8 14-9 14-10 22-11 14-12 14-13 16-14 17-15 19-16 21-17 20-18 20-19 25-20 27-21 28-22 29-23 31-24 23-25 34-26 37-28 39-29 40-30 38-31 44-33 38-34 46-35 47-36 41-37 48-38 55-39 52-40 50-41 52-42 52-43 52-44 53-45 54-46 60-47 57-48 59-49 64-51 66-52 65-54 62-55 60-56 67-57 +5-0 6-1 4-2 4-3 3-4 7-5 9-6 10-7 10-8 11-9 14-10 +1-0 0-1 2-2 3-3 4-4 5-5 4-6 5-7 8-8 8-9 8-10 9-11 10-12 12-13 13-14 12-15 15-16 15-17 +0-0 1-1 1-2 3-3 7-4 8-5 3-6 11-7 4-8 8-9 12-10 8-11 11-12 13-13 13-14 13-15 16-16 18-17 19-18 17-19 22-20 16-21 16-22 22-23 23-24 12-25 27-26 23-27 20-28 26-30 26-31 28-32 +0-0 0-1 1-2 1-3 3-4 2-5 3-6 4-7 4-8 4-9 5-10 +0-0 1-1 2-2 3-3 4-5 4-6 5-7 5-8 6-9 7-10 8-11 9-12 9-14 13-15 13-16 11-17 13-18 10-19 11-20 14-21 12-22 15-23 17-24 17-25 18-26 19-27 20-28 21-29 +0-0 1-1 2-2 3-3 4-4 2-5 7-6 7-7 5-8 11-9 12-10 9-11 
14-12 15-13 13-14 13-15 18-16 19-17 13-18 17-20 17-21 20-22 22-24 23-25 25-26 26-27 25-28 25-29 31-31 27-32 28-33 28-34 32-35 31-36 33-37 +0-0 1-1 2-2 4-3 5-4 3-5 4-6 10-7 7-8 7-9 11-10 6-11 16-12 17-13 18-14 19-15 14-16 11-17 14-18 14-19 14-20 21-21 23-22 22-23 20-24 18-25 26-28 20-29 24-30 20-31 25-32 24-33 29-34 27-35 29-36 29-37 32-38 31-39 33-40 30-41 34-43 +1-0 2-1 2-2 4-3 5-4 7-5 8-6 6-7 11-8 11-9 12-10 11-11 13-12 15-13 16-14 16-15 17-16 22-17 19-18 24-19 20-20 23-21 23-22 23-23 26-25 +0-0 1-1 2-2 2-3 6-5 4-6 4-7 8-8 17-9 9-10 11-11 11-12 7-13 9-14 9-15 10-16 11-17 16-19 15-20 16-21 19-22 20-23 20-24 17-25 22-26 21-27 22-28 22-29 23-30 21-31 24-32 26-33 28-34 29-35 31-36 31-37 30-38 32-39 +0-0 0-1 1-2 4-3 4-4 5-5 6-6 9-7 8-8 9-9 11-10 10-11 11-12 14-13 14-14 13-15 14-16 18-17 18-18 18-19 20-20 21-21 20-22 23-23 21-24 16-25 24-26 +0-0 1-1 2-2 1-3 1-4 2-5 1-6 2-7 3-8 3-9 3-10 6-11 5-12 3-13 6-14 6-15 6-16 6-17 5-18 6-19 7-20 8-21 8-22 +0-0 0-1 4-2 5-3 5-4 3-5 7-6 5-7 12-8 10-9 11-10 11-11 19-12 15-13 15-14 18-15 15-16 17-17 10-18 20-19 +1-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 12-12 14-13 16-14 17-15 18-16 18-17 20-18 21-19 22-20 23-21 25-22 26-23 27-24 29-25 26-26 28-27 31-28 33-29 43-31 35-32 44-34 45-35 46-36 47-37 36-39 36-40 46-42 45-43 48-44 +0-0 1-1 23-2 23-3 2-4 4-5 5-6 6-7 6-8 9-9 12-10 6-11 13-12 17-14 14-15 44-18 48-20 49-21 48-22 20-23 22-24 17-25 27-26 26-28 27-29 36-30 37-31 35-32 35-33 35-34 40-36 40-38 40-39 43-40 45-41 43-42 44-43 46-44 48-45 49-46 47-47 56-48 57-49 54-50 54-51 51-52 58-54 60-57 +0-0 2-1 2-2 17-4 17-5 19-6 18-7 7-8 5-10 14-11 25-12 12-13 23-14 27-15 28-16 16-17 31-18 31-19 33-20 30-21 32-22 26-23 18-24 9-26 10-27 26-28 28-29 30-31 32-32 34-33 35-35 36-37 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 8-7 10-8 11-9 9-10 9-11 11-12 12-13 13-14 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 12-7 7-8 7-9 10-10 11-11 12-12 13-13 13-14 14-15 15-16 16-17 17-18 +0-0 2-1 3-2 4-3 +1-0 1-1 1-2 4-3 4-4 4-5 15-6 6-7 9-8 9-9 22-10 11-11 12-12 11-13 11-14 15-15 7-16 10-17 15-18 12-19 23-20 23-22 14-23 24-24 26-26 8-27 26-28 27-29 29-30 27-31 30-32 31-33 33-34 35-35 35-36 37-37 39-38 38-39 37-40 40-41 41-42 +0-0 2-1 2-2 3-3 5-4 3-5 4-6 10-7 4-8 3-9 4-10 6-11 8-12 7-13 6-14 12-15 12-16 11-17 13-18 14-19 12-20 12-21 14-22 14-23 15-24 18-25 17-26 18-27 21-29 22-30 22-31 23-32 +0-0 2-1 1-2 2-3 2-4 3-5 6-6 4-7 5-8 7-9 16-10 6-11 8-12 9-13 10-14 11-15 10-16 11-17 12-19 14-20 14-21 15-22 18-23 18-24 19-25 18-26 19-27 +0-0 2-1 4-2 2-3 5-4 6-5 7-6 12-7 10-8 9-9 13-10 13-11 15-12 17-13 17-14 18-15 19-17 19-18 21-19 22-20 23-21 25-22 26-23 +2-0 2-1 4-4 2-5 7-6 7-7 8-8 7-9 6-10 6-11 8-12 10-13 8-14 8-15 12-16 17-17 13-18 17-19 14-20 18-21 14-22 21-23 17-24 17-25 19-26 22-27 20-28 22-29 23-31 23-32 23-33 24-34 +0-0 3-1 1-2 6-5 6-6 17-7 2-8 13-9 12-10 13-11 15-12 14-13 10-14 9-15 15-16 17-17 18-18 19-19 20-20 22-21 20-22 22-23 25-24 28-25 27-27 27-28 28-29 30-30 32-31 29-32 29-33 33-34 +0-0 1-1 1-2 3-3 4-4 8-5 3-6 11-7 7-8 8-9 13-10 8-11 11-12 14-13 14-14 14-15 15-16 15-17 20-18 21-19 19-20 24-21 18-22 18-23 24-24 24-25 25-26 29-27 25-28 22-29 30-30 31-31 31-32 32-33 +0-0 14-1 1-2 3-3 11-4 5-6 8-7 8-8 9-9 11-10 11-12 11-13 11-14 13-15 15-17 12-18 17-19 16-21 18-22 18-23 21-24 25-26 21-28 22-29 22-30 30-31 22-32 30-33 32-35 25-36 29-37 31-38 35-39 28-40 39-41 26-42 33-43 36-44 39-45 39-46 42-47 43-48 42-49 43-50 +0-0 2-1 3-2 +0-0 1-1 2-2 3-3 7-4 5-5 7-6 9-7 10-8 11-9 14-10 14-11 15-12 13-13 18-14 16-15 18-16 19-17 20-18 21-19 24-20 25-21 +2-0 1-1 2-2 3-3 5-4 6-5 8-6 9-7 9-8 15-9 16-10 15-11 15-12 19-13 
26-14 26-15 11-16 14-17 22-18 14-19 22-20 24-21 25-22 30-23 29-24 33-25 33-27 34-28 36-29 35-30 31-31 41-32 39-33 41-34 +0-0 1-1 2-2 3-3 4-4 9-5 6-6 7-7 10-8 8-9 10-10 6-11 9-12 10-13 13-14 10-15 15-16 18-17 18-18 19-19 20-20 17-21 16-22 17-23 22-24 22-25 27-26 22-27 22-28 25-30 28-32 26-33 27-34 23-35 31-36 31-37 31-38 32-39 33-40 35-41 35-42 36-43 38-44 39-45 40-46 42-47 39-48 41-49 43-50 +0-0 1-1 3-2 2-3 4-4 5-5 6-6 7-7 9-8 8-9 8-10 13-11 15-12 13-13 15-14 19-15 20-16 21-17 11-18 22-20 22-21 23-24 25-25 28-26 26-27 27-28 27-29 +0-0 1-2 1-3 1-4 0-5 2-6 6-7 6-8 3-9 8-10 9-11 8-12 4-13 10-14 5-15 11-16 11-17 +6-0 7-1 5-2 3-3 4-5 5-6 8-8 10-9 11-11 10-13 13-14 14-15 15-16 16-17 18-18 25-20 20-22 20-23 21-24 23-25 25-26 27-27 29-28 27-29 28-30 +0-0 1-1 3-2 3-3 2-4 7-5 8-6 10-7 11-8 10-9 13-10 4-11 14-12 14-13 18-14 20-15 16-16 22-17 25-18 19-19 22-20 19-21 27-22 19-23 29-24 28-26 27-27 30-28 31-29 32-30 34-31 34-32 33-33 35-34 36-35 38-36 38-37 39-38 39-39 41-40 42-41 43-42 44-43 45-44 +0-0 1-1 2-2 3-3 4-4 6-5 7-6 8-7 9-8 10-9 +0-0 1-1 2-2 3-3 4-4 5-5 4-6 +0-0 1-1 3-2 4-3 +0-0 1-1 3-2 5-3 7-4 6-5 8-6 10-7 13-8 15-9 16-10 17-11 18-12 +1-1 4-2 4-3 5-4 6-5 8-6 9-7 11-8 7-9 10-10 10-11 14-12 11-13 15-14 12-15 18-16 22-17 19-18 29-19 19-20 31-21 18-22 26-23 27-24 26-25 25-26 23-27 23-28 23-30 30-31 3-32 32-34 +0-0 1-1 1-2 2-3 4-4 6-5 5-6 10-7 11-8 11-9 17-10 10-11 15-12 16-13 19-14 20-15 21-16 22-17 14-18 23-19 23-20 28-22 29-23 30-24 23-25 27-27 25-28 31-29 35-31 36-32 35-33 36-34 37-35 38-36 40-37 36-38 41-39 +0-0 1-1 2-2 4-4 5-6 7-7 9-9 11-11 13-12 14-13 15-14 15-15 17-16 17-17 21-19 22-20 17-21 18-22 20-23 21-24 25-25 21-26 22-27 23-28 23-29 29-30 29-31 30-32 25-33 26-34 27-35 28-36 33-37 35-38 37-39 36-40 38-41 +0-0 2-1 2-2 4-3 5-4 7-5 8-6 9-7 10-8 11-9 11-10 13-11 12-12 13-13 17-14 18-15 20-18 19-19 21-20 25-21 26-22 25-23 28-24 29-25 30-26 36-27 32-28 34-29 33-30 36-31 37-33 36-34 36-35 40-36 39-37 40-38 41-39 42-40 44-41 47-42 48-43 49-44 50-45 52-46 46-48 51-49 53-50 +7-0 5-1 2-2 9-3 5-4 7-5 7-6 9-7 8-8 14-9 12-10 14-11 21-12 12-13 27-14 22-15 20-16 23-17 21-18 19-19 20-20 15-21 26-22 30-23 25-24 33-25 26-26 30-27 34-29 +0-0 1-1 2-2 5-3 4-4 6-5 6-6 7-7 15-8 2-9 9-10 10-11 11-12 13-13 7-14 13-15 15-17 10-18 19-21 28-23 29-24 23-25 25-26 24-27 26-29 35-30 29-31 30-32 35-33 32-34 34-35 28-36 34-37 36-38 37-39 40-40 34-41 39-42 39-43 43-45 44-46 +7-0 5-1 6-2 1-3 6-4 7-5 8-7 11-8 11-9 14-10 15-11 16-12 17-13 17-14 19-15 17-16 20-17 20-19 24-20 25-21 26-22 28-23 28-24 30-25 31-26 32-27 33-28 31-29 34-30 +0-0 2-1 3-2 4-3 4-4 7-5 8-6 11-7 12-8 14-9 14-10 17-11 18-12 19-13 21-15 23-16 22-17 31-19 30-20 38-21 25-22 31-24 26-25 34-26 29-28 39-29 39-30 39-31 41-32 42-33 43-34 44-35 45-36 46-37 49-38 50-39 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 9-8 10-9 11-10 17-11 19-12 15-13 16-14 15-15 20-16 20-17 24-19 26-20 28-21 27-23 24-24 29-25 29-26 31-27 31-29 37-30 37-32 37-33 +0-0 1-1 2-2 20-4 6-5 5-6 4-7 9-9 8-10 10-11 12-12 14-13 15-14 13-16 17-17 18-18 16-19 21-20 21-21 23-23 24-24 +0-0 3-1 3-2 1-3 5-4 6-5 7-6 6-7 9-8 9-9 11-10 10-11 12-12 12-13 14-14 23-15 16-16 17-17 17-18 18-19 19-20 21-21 20-22 23-23 22-24 +0-0 1-2 2-4 5-5 5-6 6-7 7-8 9-9 9-10 8-11 13-12 7-13 11-14 12-15 11-16 13-17 14-18 17-19 14-20 14-21 18-23 16-24 19-25 20-26 21-27 22-28 24-29 21-30 23-31 +0-0 0-1 1-2 2-3 3-4 0-5 4-6 6-7 5-8 7-9 7-10 8-11 6-12 9-13 12-14 10-15 11-16 12-17 12-18 14-19 13-20 14-21 16-22 15-23 +0-0 3-1 2-2 3-3 4-4 5-5 7-6 7-7 8-8 9-9 13-10 15-12 14-13 17-14 18-15 19-16 14-17 20-18 24-19 22-20 23-21 28-22 25-23 31-24 
31-25 31-26 34-27 35-28 36-29 37-30 38-31 41-32 42-33 43-34 44-35 +0-0 1-1 2-2 4-3 4-4 7-5 7-6 6-7 10-8 9-9 13-10 14-11 11-12 12-13 14-14 17-15 21-16 17-17 17-18 14-19 25-20 19-21 19-22 22-23 31-24 24-25 25-26 30-27 34-28 30-29 34-30 30-31 32-32 35-33 30-34 30-36 37-37 36-38 +0-0 2-1 2-2 3-3 6-4 7-5 9-6 10-7 5-9 8-10 11-11 11-12 13-13 14-14 15-15 21-16 18-17 23-18 18-19 18-20 18-22 28-23 27-24 26-26 28-28 29-29 29-30 31-31 31-32 35-33 34-34 35-35 35-36 37-37 39-38 40-39 40-40 41-41 44-42 42-43 44-44 45-45 +0-0 1-1 3-2 5-3 5-4 15-7 14-8 18-9 18-10 8-11 10-12 13-13 14-14 13-15 20-17 25-18 27-21 1-22 27-23 41-24 42-25 43-26 25-27 32-28 34-29 34-30 22-31 24-32 38-34 32-35 41-37 41-38 44-40 +0-0 +0-0 1-1 2-2 5-3 3-4 5-5 6-6 7-7 7-8 9-9 9-10 +8-0 9-1 7-2 1-3 7-4 2-5 11-6 3-7 11-8 14-9 16-10 17-11 18-13 19-14 19-15 19-16 21-17 28-18 24-19 20-20 23-21 26-22 27-23 28-24 29-25 32-27 33-28 35-29 35-30 34-31 37-32 38-33 43-34 43-35 44-36 46-37 47-38 50-39 49-40 48-41 49-42 48-43 51-44 +0-0 1-1 3-2 4-3 5-4 12-5 8-6 7-7 7-8 8-9 17-10 18-11 11-12 14-13 13-14 16-15 14-16 10-17 11-18 19-19 18-20 +0-0 3-2 3-3 3-4 5-5 8-6 6-7 6-8 7-9 10-11 10-12 11-13 11-14 12-15 13-16 11-17 14-18 15-19 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-8 8-9 10-10 13-12 12-13 12-14 15-15 16-16 17-17 16-18 18-19 20-20 22-22 17-23 22-24 23-25 26-26 26-27 26-28 27-29 33-31 34-33 33-34 32-35 37-36 37-37 35-38 38-39 +1-0 4-1 7-2 1-3 2-4 8-5 7-6 7-7 10-8 10-9 16-10 12-11 12-12 19-13 20-14 16-15 23-16 29-17 24-18 32-19 25-20 28-21 33-23 24-24 30-25 34-26 32-27 33-28 34-29 37-30 38-31 39-32 26-33 41-34 42-35 43-37 +0-0 1-1 2-2 4-3 5-4 7-5 8-6 9-7 8-8 6-9 11-10 10-11 12-12 11-13 14-14 14-15 19-16 15-17 16-18 19-19 19-20 18-21 20-22 20-23 24-24 26-25 27-26 26-27 24-28 28-29 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 7-7 8-8 9-9 8-10 11-11 12-12 14-13 13-14 15-15 +0-0 2-1 1-2 3-3 5-5 4-6 9-7 2-8 4-9 8-10 12-11 13-12 14-13 12-14 12-15 17-17 19-18 18-19 17-20 20-21 18-22 20-23 24-24 27-25 25-26 29-27 25-28 26-29 27-30 28-31 30-32 31-33 32-34 31-35 30-37 35-38 34-39 +0-0 1-1 2-2 2-3 4-4 5-5 6-6 8-7 8-8 10-9 9-10 11-11 12-12 9-13 14-14 15-15 15-16 15-17 15-18 15-19 19-21 17-22 18-23 19-24 22-25 25-26 25-27 26-28 26-29 27-30 28-31 29-32 +0-0 2-1 1-2 6-4 6-5 10-6 12-7 10-8 11-9 11-10 13-11 15-12 16-13 17-14 21-15 18-16 22-17 21-18 25-19 26-20 25-21 26-22 27-23 +0-0 1-1 2-2 3-3 5-4 7-5 5-6 8-8 9-9 10-10 11-11 12-12 14-13 13-14 14-15 16-16 16-17 17-18 17-19 19-20 +0-0 3-1 6-2 4-3 5-4 3-5 7-6 10-7 12-8 16-9 13-10 14-11 15-12 25-14 18-15 21-16 22-17 23-18 26-20 +0-0 1-1 3-3 3-4 5-5 4-6 7-7 7-8 11-9 11-10 8-11 5-12 10-14 5-15 13-16 12-18 11-19 9-20 15-21 14-22 18-23 18-24 19-25 20-26 18-27 21-28 22-29 18-30 23-31 23-32 +0-0 1-1 3-2 5-3 7-4 7-5 6-6 9-7 10-8 10-9 11-10 10-11 18-12 11-13 13-14 13-15 22-16 23-17 17-18 24-19 20-20 20-21 15-22 16-23 25-24 26-25 28-26 27-27 +0-0 2-1 3-2 6-3 5-4 5-5 9-6 6-7 6-8 7-9 12-10 12-11 13-12 12-13 14-14 14-15 15-16 16-17 17-18 14-19 22-21 23-22 24-23 24-24 9-25 27-27 30-28 25-29 29-30 33-31 32-32 32-33 35-34 +7-0 8-1 2-2 10-3 5-4 4-5 9-6 6-7 4-8 10-9 10-10 15-11 15-12 14-13 1-14 16-15 11-16 16-17 16-18 18-19 20-20 23-21 23-22 23-23 24-24 28-25 27-26 29-27 30-28 31-29 32-30 32-31 33-32 36-33 37-34 33-35 37-36 40-37 39-38 41-39 38-40 42-41 44-42 45-43 44-44 +0-0 1-1 2-2 4-4 5-5 8-6 7-7 9-8 10-9 11-10 14-11 19-12 3-13 17-15 24-17 18-18 19-19 23-20 23-21 25-22 26-23 27-24 28-25 30-26 30-27 30-28 29-29 33-30 36-31 35-32 37-33 41-34 38-35 39-36 42-37 44-38 44-39 45-40 47-41 46-42 47-43 48-44 50-45 49-46 51-47 53-48 50-49 55-50 +0-0 2-1 2-2 2-3 5-4 4-5 
5-6 7-7 8-8 9-9 +0-0 1-1 2-2 3-3 5-4 8-5 5-6 9-7 10-8 14-9 9-10 9-11 12-14 13-15 15-16 20-17 21-18 23-19 21-20 21-21 24-23 +1-0 2-1 4-2 5-3 6-4 8-5 +0-0 2-1 2-2 3-3 4-4 5-5 6-6 6-7 8-8 8-9 9-10 9-11 11-12 11-13 14-14 15-15 17-16 17-17 17-18 16-19 10-20 18-21 22-22 19-23 21-24 21-25 22-26 20-27 25-28 27-29 29-30 23-31 31-32 32-33 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 6-7 11-8 11-9 11-10 10-11 11-12 12-13 13-14 13-15 15-16 16-17 19-18 20-20 20-21 21-22 22-23 28-24 24-25 25-26 26-27 27-28 27-29 29-30 30-31 31-32 32-33 33-34 +3-1 4-2 3-3 1-4 5-5 10-6 7-7 7-8 10-9 9-11 10-12 11-13 12-14 14-15 14-16 15-17 17-18 18-19 19-20 20-21 +0-0 2-1 2-2 5-3 3-4 7-6 8-7 5-8 11-9 11-10 10-11 13-12 19-13 15-14 21-15 15-16 19-17 17-18 25-19 19-21 26-22 27-23 26-24 26-25 32-26 32-27 32-28 35-30 36-31 39-33 41-34 42-35 40-36 41-37 44-38 44-39 37-40 46-41 +1-0 2-1 3-2 3-3 4-4 5-5 3-6 6-8 8-9 8-10 10-11 11-12 10-13 12-14 8-15 12-16 11-18 15-19 14-20 15-21 17-22 18-23 +0-0 1-1 2-2 4-3 5-4 3-5 7-6 16-9 8-10 6-11 8-12 10-13 11-14 12-15 13-16 13-17 14-18 15-19 16-20 17-21 +2-0 2-1 2-2 15-4 6-5 0-6 8-7 7-8 8-9 7-10 10-11 11-12 14-13 5-14 6-15 15-16 16-17 +0-0 2-2 3-3 4-4 11-5 5-6 7-7 8-8 34-9 10-10 10-11 10-12 13-13 20-14 12-15 15-16 17-17 19-18 22-19 25-20 27-22 29-23 30-24 28-25 28-26 23-27 35-28 33-29 28-30 34-31 35-32 37-34 58-35 45-38 42-39 41-40 42-43 29-45 53-47 46-48 56-49 47-50 54-51 51-52 53-53 55-54 56-55 56-57 57-58 58-59 64-60 56-61 61-62 63-63 65-64 67-65 63-67 69-68 70-69 +0-0 1-1 1-2 1-3 2-4 3-5 5-6 5-7 +0-0 1-2 4-3 6-4 2-5 6-6 8-8 1-9 3-11 7-12 10-13 8-14 11-15 12-16 18-18 28-19 20-20 20-21 23-22 33-23 19-24 39-25 39-26 27-27 29-28 29-29 15-30 16-31 29-32 26-33 32-34 34-35 28-36 34-37 32-38 10-39 40-41 +0-0 2-1 8-2 5-4 8-5 8-6 12-7 18-8 10-9 11-10 15-11 10-12 17-13 22-14 18-15 17-16 20-17 17-18 22-19 22-20 23-21 23-22 27-23 28-24 28-25 23-26 30-27 26-28 28-29 28-30 35-31 35-32 40-33 32-34 35-35 43-36 33-37 39-38 39-39 39-40 41-41 46-42 46-43 60-44 52-45 44-46 48-47 55-48 48-49 48-50 50-51 56-52 60-53 52-54 60-55 56-57 61-58 56-59 61-60 +0-0 1-1 4-2 4-3 5-4 4-5 6-7 7-8 11-9 11-10 9-11 11-12 12-13 19-14 15-15 13-16 18-17 19-18 21-19 22-20 23-21 24-22 26-23 26-24 26-25 27-26 28-27 29-28 30-29 31-30 +0-0 1-1 2-2 3-3 4-4 5-5 5-6 8-7 7-8 9-9 12-10 13-11 13-12 13-13 15-14 16-15 10-16 11-17 28-18 23-19 17-20 18-21 23-22 24-23 25-24 25-25 28-26 27-27 30-28 31-29 31-30 35-31 36-32 37-33 40-35 39-36 39-37 41-38 42-39 43-40 44-41 45-42 46-43 41-44 47-45 +0-0 1-1 2-2 2-3 5-5 5-6 9-7 6-8 7-9 7-10 11-12 14-13 13-14 10-15 13-16 16-17 16-18 17-19 19-20 19-21 20-22 26-23 27-24 26-25 11-26 29-27 30-28 24-29 24-30 23-31 33-32 32-33 33-34 43-35 36-36 42-37 35-38 39-39 39-40 41-41 42-42 42-43 45-44 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 15-8 7-9 7-10 9-11 20-12 13-13 9-14 13-15 0-16 14-17 16-18 15-19 17-20 19-22 19-23 20-24 20-26 25-27 26-28 27-30 25-31 34-32 31-33 34-34 22-36 21-37 33-38 +0-0 0-1 1-2 3-3 8-4 4-5 4-6 10-8 9-9 10-10 11-11 12-12 13-13 14-14 15-15 16-16 16-17 17-18 17-19 19-20 +0-0 1-1 2-2 4-3 4-4 7-5 8-6 6-7 9-8 3-9 2-10 11-11 11-12 15-13 12-14 13-15 14-16 14-17 15-18 16-19 19-20 19-21 19-22 20-23 23-24 23-25 24-26 27-28 28-29 27-30 21-31 29-32 30-33 30-34 31-35 +0-0 1-1 1-2 3-3 4-4 3-5 5-6 5-7 6-8 +4-1 6-2 2-3 1-4 1-5 8-6 9-7 10-8 11-9 13-10 14-11 7-12 15-13 16-14 13-15 19-16 22-17 16-18 17-19 20-20 29-21 27-22 23-23 24-24 42-25 30-26 29-27 29-28 34-29 34-30 30-31 35-32 35-33 34-34 39-37 41-38 42-39 39-40 43-42 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 23-8 10-9 9-10 10-11 14-12 10-14 24-16 20-17 21-18 23-19 24-20 24-21 24-22 26-23 
28-24 29-25 26-26 31-27 32-28 35-29 36-30 39-31 43-32 38-34 40-35 32-37 42-38 44-39 +0-0 1-1 1-2 5-3 5-4 5-5 7-6 8-7 9-8 14-9 8-10 9-11 11-12 14-13 13-14 14-15 16-16 19-17 20-18 24-19 23-20 23-21 25-22 25-23 +0-0 1-1 3-2 4-3 16-4 5-7 7-8 24-9 15-10 13-11 14-12 13-13 8-14 15-15 18-16 19-17 12-18 15-19 5-20 23-21 25-22 25-23 25-24 26-25 28-26 28-27 16-28 29-29 33-31 35-32 34-33 36-34 +0-0 4-1 8-2 9-3 3-4 8-5 8-6 5-7 9-8 8-9 14-12 14-13 14-14 14-15 14-16 22-17 22-18 17-19 22-20 17-21 34-22 22-23 22-24 31-25 31-26 40-27 31-28 37-29 31-30 40-35 43-37 26-38 26-39 48-43 63-44 48-45 48-48 58-50 58-53 60-54 63-56 63-57 64-58 63-59 66-60 67-61 68-62 +0-0 2-1 2-2 4-3 3-4 5-5 5-7 9-8 10-9 9-10 10-11 10-12 10-13 6-14 7-15 14-16 +0-0 1-1 1-2 3-3 2-4 3-5 6-6 5-7 6-8 5-9 8-10 10-11 9-13 9-14 9-15 10-16 +0-0 1-1 2-2 0-3 3-4 0-5 4-6 6-7 7-8 5-9 10-10 10-11 11-12 11-13 14-14 9-15 8-17 17-18 16-19 19-20 19-21 20-22 21-23 +0-0 +0-0 2-1 4-2 5-3 5-4 7-5 8-6 9-7 10-8 +0-0 2-1 11-2 14-3 2-5 6-6 11-7 1-8 12-9 11-10 9-11 7-12 9-13 13-14 16-15 17-16 17-17 15-18 20-20 22-21 22-22 23-23 25-24 26-25 27-26 28-27 25-28 29-29 31-30 34-31 35-32 32-33 33-34 36-35 39-37 42-38 41-39 43-41 47-42 45-43 46-44 46-45 50-46 38-48 +0-0 1-1 2-2 3-3 7-4 8-5 7-6 7-7 10-8 11-9 11-10 13-11 14-12 18-13 18-14 18-15 18-16 22-17 12-18 26-19 26-20 28-22 30-23 26-24 24-25 32-26 24-27 28-28 34-29 34-30 34-31 40-32 38-33 46-34 38-35 48-36 43-37 50-38 43-39 46-40 44-41 51-42 52-43 54-44 53-45 57-46 57-47 57-48 58-49 59-50 60-51 61-52 62-53 63-54 64-55 65-56 +0-0 1-1 2-2 3-3 5-4 7-5 11-6 10-7 13-8 9-9 12-10 11-11 15-12 18-13 16-14 19-15 19-16 23-17 20-18 33-20 25-21 26-22 28-23 24-26 35-27 34-28 38-29 45-30 38-31 44-32 42-33 39-34 46-35 43-36 47-37 +0-0 4-2 4-3 10-5 10-6 9-7 10-8 10-9 13-10 13-11 6-12 12-13 9-14 19-15 19-17 21-18 18-19 19-20 19-21 27-22 29-24 25-25 29-26 33-27 37-28 28-29 31-30 31-31 34-33 35-34 36-35 37-36 39-37 39-38 41-39 42-40 43-41 44-42 46-43 49-44 46-45 49-46 47-47 52-49 +0-0 1-1 3-2 3-3 1-4 6-5 5-6 5-7 5-8 8-9 9-10 10-11 11-12 9-13 12-14 12-15 13-16 11-17 16-18 17-19 18-20 15-21 16-22 14-23 11-24 19-26 22-27 20-28 22-29 21-30 23-32 23-33 24-34 24-35 +1-0 2-2 1-3 7-4 3-6 6-7 6-8 35-10 9-11 16-12 11-13 11-14 22-15 18-16 17-17 16-19 27-20 23-21 21-22 21-23 26-24 27-25 30-27 26-29 28-31 36-33 37-34 38-35 40-36 41-37 43-38 45-40 47-41 45-43 46-44 50-45 +1-0 2-1 2-2 3-3 4-4 5-5 4-6 7-7 21-8 10-10 12-11 14-13 14-14 15-15 16-16 18-17 18-18 20-19 20-20 22-21 23-22 +0-0 1-1 +0-0 0-1 1-2 2-3 1-4 2-5 3-6 5-7 6-8 +0-0 1-1 2-2 5-3 6-4 7-5 10-6 10-7 8-8 8-9 11-10 11-11 12-12 13-13 19-14 16-15 21-16 19-17 21-18 23-19 15-20 22-21 22-22 25-23 27-24 26-25 30-26 29-27 31-28 +0-0 3-1 5-2 2-3 3-4 3-5 0-6 6-7 7-8 5-9 7-10 23-11 0-12 12-13 11-14 10-15 12-17 19-18 17-19 17-20 16-21 12-22 22-24 8-25 9-26 22-27 24-28 24-29 24-30 25-31 26-32 27-33 29-34 30-35 31-36 +0-0 1-1 3-2 3-3 4-4 2-5 7-6 6-7 6-8 8-9 9-10 8-11 10-12 11-13 11-14 13-15 14-16 17-17 18-18 19-19 15-20 18-21 18-22 20-23 20-24 +0-0 1-1 6-2 0-3 4-4 5-5 7-6 8-7 10-8 8-9 12-10 16-11 16-12 23-13 19-14 17-15 19-16 25-17 21-18 25-21 29-22 30-23 28-24 34-25 31-26 32-27 34-28 10-29 35-30 36-31 +0-0 2-1 6-2 0-3 4-4 10-6 8-7 9-8 10-9 11-10 12-11 13-12 14-13 14-14 2-16 17-17 21-18 20-19 21-20 22-21 23-22 24-23 26-24 26-25 29-26 30-27 29-28 34-29 35-30 35-31 36-32 +2-0 4-1 5-2 1-3 1-4 13-5 7-6 8-7 10-8 10-10 11-11 12-12 13-13 14-14 15-15 14-16 17-17 16-18 19-19 17-20 23-21 14-22 17-23 23-24 23-25 24-26 23-27 24-28 27-29 28-30 25-31 29-32 29-33 +0-0 2-1 3-2 6-3 6-4 5-5 9-6 8-7 8-8 12-9 13-10 11-11 
15-12 16-13 17-14 17-15 18-16 19-17 20-18 +0-0 4-1 2-2 4-3 4-4 4-5 3-6 8-7 2-8 7-9 9-10 10-11 11-12 13-14 15-16 13-17 15-18 13-19 15-20 13-21 13-22 17-23 24-24 19-25 21-26 24-27 24-28 26-29 26-30 27-31 24-32 27-33 28-34 28-35 29-38 31-39 34-40 34-41 35-42 +0-0 1-1 0-2 4-3 8-4 10-5 11-6 8-7 10-8 14-9 15-10 16-11 17-12 12-13 12-14 21-15 12-16 20-17 20-18 18-20 22-21 20-22 7-24 29-25 25-26 26-27 27-28 29-29 28-30 29-31 29-32 31-33 35-34 33-35 35-36 34-37 34-38 35-39 38-40 39-42 41-43 41-44 43-45 +0-0 3-1 3-2 7-3 16-4 9-6 10-7 12-8 13-9 12-10 15-11 15-12 13-14 13-15 21-16 23-17 19-18 10-19 27-21 29-22 22-23 25-24 25-25 35-26 37-27 29-28 37-29 40-30 42-32 36-34 37-35 38-36 41-38 42-39 43-40 43-41 +3-0 2-1 2-2 0-3 5-4 5-5 6-6 11-7 9-8 11-9 12-10 15-11 13-12 14-13 16-14 19-15 21-16 19-17 20-18 +0-0 4-1 3-2 3-3 4-4 1-5 10-6 11-7 11-8 13-9 9-10 10-11 17-12 17-13 19-14 14-15 21-16 18-18 20-19 18-20 27-22 28-23 28-24 27-25 28-26 26-27 31-28 28-29 28-30 31-31 33-32 33-33 36-34 33-35 34-36 37-37 +2-0 2-1 7-2 3-3 3-4 4-5 4-6 7-7 7-8 0-9 13-13 12-14 13-15 20-16 17-17 18-18 22-19 20-20 20-21 22-22 20-23 27-24 24-25 27-26 24-27 24-28 27-29 27-30 37-31 37-32 28-33 31-34 31-35 33-36 34-38 36-39 +3-1 2-2 3-3 6-4 8-5 1-6 10-7 16-8 1-9 16-10 13-11 16-12 19-13 17-14 20-15 19-16 19-17 20-18 28-19 13-20 30-21 20-22 34-23 36-24 34-25 34-26 34-27 34-28 32-29 38-30 41-31 41-32 42-33 39-34 49-35 27-36 48-37 45-38 45-39 52-41 51-42 54-43 53-44 52-45 55-46 57-47 +0-0 +1-0 1-1 2-2 2-3 3-4 4-5 7-6 9-7 9-8 +0-0 3-1 2-2 6-3 6-4 9-5 8-6 10-7 11-8 12-9 +0-0 1-1 2-2 3-3 4-4 5-5 8-6 9-7 10-8 12-9 13-10 11-11 16-13 17-14 18-15 19-16 21-17 30-18 23-19 24-20 23-21 26-22 26-23 28-24 29-25 29-26 31-27 +1-1 2-2 2-3 3-4 3-5 5-6 5-7 6-8 6-9 6-10 9-11 10-12 10-13 11-14 13-15 20-16 21-17 15-18 15-19 22-20 23-21 16-22 12-23 13-24 19-26 22-27 22-28 23-29 24-30 +0-0 1-1 10-2 3-3 5-4 7-5 4-6 5-7 11-8 12-9 11-10 14-11 15-12 16-13 17-14 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 5-7 6-8 7-9 5-10 8-11 9-12 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 6-9 13-10 11-11 13-12 13-13 14-14 11-16 13-17 16-18 20-19 20-20 23-22 23-23 24-24 26-25 28-26 28-27 28-28 29-29 +0-0 3-1 1-2 3-3 1-4 3-6 4-7 5-8 3-9 10-10 11-11 6-12 13-13 12-14 14-15 15-16 16-17 17-18 16-19 19-20 21-21 20-22 20-23 21-24 23-25 25-26 25-27 26-28 26-29 27-30 +0-0 2-1 1-3 4-4 7-6 7-7 6-8 8-9 8-10 6-11 11-12 16-13 13-14 21-15 16-16 23-17 13-18 17-19 17-20 18-21 27-22 16-23 21-24 22-25 24-27 22-28 25-29 22-30 25-31 29-32 29-33 +0-0 0-1 1-2 9-3 6-4 3-5 5-6 5-7 8-8 8-9 10-10 11-11 13-12 14-13 2-14 15-16 16-17 15-18 18-20 13-21 24-23 26-25 29-27 31-28 32-31 34-32 34-33 30-34 35-35 36-36 38-37 37-38 40-39 39-40 43-42 43-43 45-44 48-45 47-46 48-47 49-48 50-49 51-50 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 8-8 9-9 10-10 11-11 13-12 14-13 16-15 19-16 20-17 20-18 20-19 21-20 +0-0 1-1 3-2 2-3 5-4 6-5 9-6 6-7 4-8 9-9 6-10 13-11 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 8-7 8-8 7-9 10-10 11-11 10-12 12-13 14-14 14-15 13-16 15-17 +0-0 1-1 2-2 5-3 4-4 3-5 3-6 6-7 7-8 16-9 3-10 8-11 9-12 12-13 12-14 13-15 14-16 15-17 18-18 19-19 19-20 18-21 15-22 15-23 19-24 23-25 21-26 22-27 23-28 19-29 25-30 29-31 27-32 29-33 30-34 31-35 34-37 31-38 34-39 29-40 28-41 28-42 32-43 38-44 38-45 36-46 39-48 40-49 42-50 43-51 44-52 43-53 41-54 45-55 +0-0 0-1 1-2 2-3 3-4 3-5 3-6 4-7 4-8 5-9 6-10 6-11 6-12 +0-0 1-1 1-2 5-3 6-4 2-5 2-6 7-7 8-8 9-9 10-10 11-11 12-12 +0-0 1-1 2-2 4-3 3-4 7-5 5-6 6-7 9-8 13-11 10-12 12-13 15-14 16-15 14-16 16-17 19-18 22-20 22-21 23-22 +0-0 1-1 2-2 4-3 5-4 3-5 8-6 10-7 11-8 12-9 9-10 13-11 14-12 17-13 15-14 18-15 19-16 20-17 19-18 
22-19 +0-0 4-1 5-2 1-3 8-4 9-5 2-7 11-8 12-9 14-11 20-12 15-13 15-14 17-15 23-16 23-17 23-18 23-19 24-20 24-21 18-22 19-23 21-24 30-26 31-27 +0-0 1-1 2-2 3-3 5-4 5-5 4-6 4-7 4-8 8-9 9-10 10-11 11-12 12-13 15-14 18-15 19-16 20-17 17-18 16-19 16-20 19-21 24-22 23-24 24-25 25-26 27-27 29-28 30-29 31-30 +0-0 1-1 3-2 3-3 5-4 6-5 10-6 8-7 9-8 17-9 17-10 13-11 14-12 16-13 15-14 17-15 18-16 +0-0 1-1 1-2 4-3 4-4 5-5 5-6 7-7 8-8 9-9 9-10 11-11 12-12 13-13 14-14 13-15 15-16 15-17 17-18 18-19 +0-0 0-1 2-2 3-3 4-4 5-5 +2-0 2-2 0-3 3-4 6-5 8-6 7-7 7-8 5-9 6-10 7-11 11-12 10-13 11-14 20-15 19-16 13-17 14-18 15-19 16-20 23-21 24-22 24-23 26-24 +0-0 1-1 1-2 3-3 5-4 5-6 5-7 8-8 13-9 8-10 9-11 10-12 15-13 11-14 12-15 11-16 12-17 13-18 17-19 17-20 16-21 17-22 22-23 22-24 23-25 24-26 25-27 26-28 +0-0 0-1 2-2 1-3 4-4 5-5 6-6 7-7 8-8 8-9 12-10 9-11 9-12 10-13 13-14 +0-0 2-1 5-2 4-3 5-4 6-5 7-6 8-7 9-8 10-9 11-10 14-11 15-12 +0-0 1-1 2-2 5-3 7-4 8-5 9-6 10-7 12-8 11-9 10-10 14-11 14-12 15-13 17-14 19-15 20-16 22-17 24-18 24-19 26-20 26-21 27-22 +1-0 +0-0 1-1 2-2 4-3 4-4 +0-0 3-1 4-2 5-3 1-4 6-5 5-6 8-7 9-8 11-9 10-10 14-11 13-12 18-13 19-14 22-15 7-16 20-18 20-19 22-20 21-21 22-22 24-23 23-24 25-25 +1-0 2-1 3-2 4-3 5-4 5-5 6-7 9-8 8-9 9-10 11-11 12-12 13-13 14-14 15-15 16-16 +0-0 1-1 2-2 3-3 4-4 6-5 8-6 6-7 8-8 11-10 12-11 15-13 16-14 12-15 17-16 17-17 20-18 21-19 22-20 23-21 +0-0 1-1 2-2 4-3 2-4 5-5 6-6 6-7 8-9 11-10 10-11 12-12 13-13 14-14 15-15 16-16 17-17 18-18 20-19 21-20 21-21 22-22 +0-0 1-1 2-2 4-3 5-4 6-5 8-6 8-7 10-8 11-9 12-10 11-11 13-12 +0-0 1-1 4-2 5-3 9-5 15-6 8-7 3-8 12-9 11-10 10-11 13-12 13-13 17-14 18-15 19-16 18-17 19-18 21-19 24-21 25-23 27-26 29-27 32-29 34-30 37-31 38-32 36-33 40-34 40-35 38-36 41-37 +2-0 5-1 5-2 1-3 9-4 11-5 9-6 16-7 9-8 8-9 18-10 20-11 20-12 23-13 19-14 19-15 19-16 21-17 25-18 20-19 21-20 29-21 18-22 30-23 27-24 30-25 30-26 42-27 47-28 34-29 37-30 50-31 37-32 43-33 41-34 37-35 44-37 37-38 46-40 49-41 49-42 51-43 +0-0 2-1 1-3 5-4 6-5 8-6 9-7 10-8 +2-0 2-1 3-2 3-3 6-4 7-5 8-6 5-7 9-8 9-9 10-10 11-11 10-12 13-13 +0-0 1-1 3-2 3-3 4-4 5-5 6-6 7-7 8-8 10-9 11-10 13-11 14-12 12-13 14-14 17-15 15-16 15-17 20-18 16-19 20-20 19-21 27-23 28-24 24-25 26-26 32-29 30-30 34-31 19-32 33-33 35-34 35-35 35-36 32-37 33-38 39-39 +4-0 4-1 4-2 4-3 10-4 8-5 7-6 7-7 14-8 8-9 12-10 17-11 8-12 15-13 15-14 21-15 18-16 24-17 20-18 26-19 23-20 28-21 23-22 29-23 31-24 27-25 27-26 29-27 27-28 34-29 34-30 36-31 37-32 34-33 29-34 38-35 39-36 40-37 +0-0 1-1 2-2 3-3 2-4 5-5 6-6 4-7 7-8 12-9 11-10 9-11 14-12 9-13 15-14 14-15 14-16 12-17 22-18 25-19 25-20 20-21 24-22 26-23 28-24 28-25 29-26 30-28 29-29 17-30 31-31 +0-0 0-1 3-2 4-3 3-4 6-5 6-6 4-7 7-8 3-9 9-10 11-11 10-12 10-13 8-14 14-15 14-16 17-17 14-18 15-19 16-20 15-21 19-22 20-23 +0-0 1-1 6-2 3-3 4-4 4-5 6-6 9-7 7-8 12-9 12-10 16-11 14-12 15-13 20-14 20-15 21-16 20-17 23-18 24-19 21-20 26-21 28-23 29-24 30-25 31-26 32-27 42-28 40-29 43-30 43-31 44-32 43-33 43-34 46-35 46-36 48-37 +0-0 4-1 5-2 3-3 3-4 8-5 8-6 9-7 7-8 8-9 3-10 11-11 11-12 12-13 +0-0 1-1 3-2 3-3 3-4 5-5 6-6 7-7 +1-0 19-1 1-2 2-3 6-4 6-5 6-6 8-7 9-8 14-9 11-10 13-11 15-12 16-13 28-14 17-15 18-16 19-17 19-18 23-19 21-20 22-21 23-22 25-23 26-24 29-25 28-26 30-27 37-28 35-29 36-31 35-32 35-33 48-34 38-35 38-36 42-37 46-39 37-40 36-41 46-42 51-43 52-44 55-45 54-46 55-47 56-49 57-50 58-51 +1-1 3-2 13-3 10-4 15-5 13-6 14-7 9-8 12-9 12-10 10-11 16-12 22-13 16-14 18-15 20-16 17-17 23-18 24-19 +0-0 2-1 3-2 1-3 5-4 5-5 4-6 10-7 11-8 11-9 14-10 15-11 16-12 19-13 11-14 21-15 18-16 19-17 22-18 21-19 
29-20 26-22 26-23 31-24 29-25 27-26 32-27 36-28 37-29 38-30 35-32 39-33 +0-0 5-1 10-2 15-3 6-4 9-5 13-6 3-7 16-8 9-10 10-11 20-12 23-16 24-17 24-18 21-19 15-20 15-21 24-22 27-23 34-24 35-25 36-26 33-27 37-28 33-29 40-30 37-31 38-32 42-33 39-34 42-35 +2-0 1-1 2-2 3-4 4-5 1-6 7-7 5-8 8-9 8-10 9-11 6-12 15-13 9-14 13-15 17-17 12-18 24-19 19-20 21-21 20-23 24-25 25-26 30-27 25-28 28-29 30-30 31-32 +1-0 1-1 1-2 4-3 4-4 5-5 6-6 9-7 10-8 12-9 14-10 14-11 16-12 16-13 17-14 19-15 20-16 22-17 24-18 26-19 25-20 23-21 26-22 29-23 +0-0 1-1 1-2 4-3 4-4 3-5 5-6 6-7 8-8 9-9 14-10 9-11 10-12 12-13 14-14 12-15 13-16 14-17 18-18 21-20 23-21 23-22 17-23 24-24 +0-0 1-1 3-2 4-4 7-5 6-7 5-8 10-9 7-10 7-11 9-12 14-13 11-14 16-15 15-16 16-17 17-18 14-19 18-22 20-23 22-24 22-25 23-26 25-27 28-28 28-29 34-30 30-31 34-34 33-36 36-37 +1-0 1-1 2-2 4-3 7-4 5-5 7-6 8-7 11-8 10-9 10-10 10-11 12-12 13-13 16-14 15-15 16-16 18-17 20-18 19-19 21-20 +0-0 2-1 1-2 4-3 3-4 1-5 7-6 6-7 7-8 8-9 10-10 11-11 12-12 6-13 14-14 12-15 14-16 16-17 14-18 16-19 18-20 19-21 20-22 21-23 11-24 22-25 +0-0 2-1 4-2 1-3 3-4 6-5 10-6 8-7 9-8 10-9 11-12 12-13 15-14 23-15 19-16 22-17 22-18 21-19 22-20 21-21 24-22 +1-1 2-2 4-3 3-4 4-5 11-6 8-7 8-8 9-9 11-10 12-11 13-12 14-13 +0-0 1-1 4-2 3-3 5-4 6-5 8-6 9-7 10-8 13-9 10-10 12-11 11-12 13-13 14-14 14-15 20-16 21-17 18-19 22-21 22-22 23-23 +0-0 1-1 2-2 2-3 2-4 4-5 4-6 4-7 7-8 6-9 9-10 8-11 7-12 8-13 10-14 11-15 13-16 13-17 10-18 14-19 15-20 +2-0 3-1 4-2 5-3 10-4 7-5 9-6 12-7 11-8 14-9 14-10 15-11 +0-0 1-1 0-2 2-3 2-4 2-5 4-7 4-8 5-9 6-10 8-12 10-13 11-14 11-15 12-16 15-17 14-19 19-20 19-21 18-22 18-24 20-25 +1-0 1-1 2-2 3-3 4-4 5-5 7-6 8-7 9-8 12-9 12-10 15-11 15-12 16-13 +0-0 1-1 5-2 3-3 6-4 7-5 8-6 8-7 9-8 10-9 12-10 13-11 15-12 15-13 17-14 17-15 18-16 19-17 20-18 22-19 22-20 23-21 26-22 27-23 29-24 27-25 30-26 +0-0 2-1 3-2 4-4 4-5 6-6 7-7 8-8 7-9 9-10 10-11 11-12 11-13 13-15 15-16 16-17 16-18 16-19 17-20 +0-0 1-1 3-2 2-3 5-4 6-5 5-6 6-7 7-8 6-9 15-10 14-11 17-12 13-13 14-14 13-15 16-17 19-18 19-19 20-20 21-21 23-22 22-23 19-25 20-26 25-28 27-29 29-30 30-31 30-32 31-33 +0-0 1-1 5-3 7-4 4-5 1-7 3-8 9-9 10-10 10-11 9-12 10-13 14-14 14-15 22-16 18-17 18-18 20-19 21-20 18-21 17-22 28-23 20-24 28-25 20-26 30-27 31-28 32-29 27-30 34-32 38-33 35-35 42-37 38-38 40-39 41-40 42-42 44-43 44-44 46-45 46-46 48-47 48-48 49-49 +0-0 1-1 4-2 3-3 4-4 5-5 6-6 8-7 9-8 10-9 13-10 14-11 15-12 22-13 13-14 16-15 17-16 19-17 18-18 25-19 25-20 24-21 25-22 26-23 27-24 29-25 36-26 33-27 34-28 30-29 35-31 35-32 36-33 41-34 42-35 43-36 44-37 45-38 46-39 48-40 49-41 49-42 50-43 +0-0 1-1 3-2 4-3 6-4 7-5 8-6 9-7 10-8 +2-1 3-2 4-3 6-5 5-6 7-7 9-8 9-9 14-11 11-12 12-13 12-14 14-15 22-16 16-17 22-18 21-20 18-21 24-22 22-23 22-24 29-25 30-26 29-27 30-28 31-29 31-30 30-31 35-32 37-34 35-35 43-36 38-38 38-39 39-41 40-43 45-45 +1-0 1-1 2-2 11-3 2-5 9-6 10-7 6-8 7-9 10-10 11-12 13-13 13-14 14-15 16-16 14-17 17-18 20-19 18-20 20-21 15-22 25-23 22-24 19-25 28-26 28-27 29-28 30-29 32-30 30-31 31-32 30-33 36-34 36-35 36-38 32-40 41-41 40-42 +0-0 1-1 4-2 1-3 3-4 4-5 5-6 5-7 9-8 6-9 9-10 10-11 +0-0 14-2 5-3 5-4 2-5 8-6 8-7 6-8 11-9 9-10 14-11 15-12 +0-0 1-1 6-3 7-4 4-5 8-6 9-7 10-8 12-10 7-11 15-12 13-13 13-14 12-15 16-16 17-17 4-19 16-20 29-23 30-24 23-25 33-26 33-27 21-28 24-29 30-30 26-31 27-32 26-33 31-34 35-35 33-36 40-37 35-38 40-40 35-41 35-42 46-43 38-44 45-45 48-46 46-48 47-49 48-50 52-51 49-52 52-53 51-54 48-55 53-56 53-57 57-59 56-60 +1-0 2-1 2-2 4-3 5-4 6-5 7-6 +0-0 4-1 4-2 3-3 6-4 5-5 9-6 6-7 11-8 10-9 12-10 12-11 16-12 17-13 16-14 
1-15 18-16 18-17 21-18 18-19 18-20 23-21 25-22 25-23 26-24 +0-0 1-1 1-2 4-4 6-5 6-6 6-7 8-8 8-9 10-10 11-11 10-12 13-13 14-14 14-15 15-16 +0-0 1-1 2-2 2-3 4-4 6-5 7-6 8-7 9-8 10-9 11-10 10-11 12-12 13-13 14-14 14-15 17-17 +0-0 2-1 2-2 7-3 7-4 1-5 2-6 8-7 6-8 10-9 8-10 12-11 11-12 11-13 13-14 13-16 14-17 15-18 16-19 18-20 21-21 23-22 19-23 21-24 22-25 +0-0 1-1 2-2 3-3 6-4 6-5 4-6 6-7 9-8 10-9 11-10 12-11 11-12 16-15 15-16 17-17 19-18 18-19 20-20 22-21 23-22 +0-0 10-1 1-2 2-3 4-4 6-5 5-6 13-7 13-8 11-9 12-10 13-11 13-12 16-13 13-14 16-16 17-17 18-18 18-19 20-20 20-21 21-22 27-23 27-24 27-25 27-26 27-28 32-29 32-30 34-31 34-33 32-34 37-35 37-36 37-37 36-38 41-39 41-40 39-41 40-42 41-43 41-44 41-45 41-46 49-47 52-48 50-49 45-50 52-52 58-53 55-54 53-55 54-56 55-57 58-59 60-60 61-61 62-62 63-63 71-64 65-65 68-66 69-67 71-68 +0-0 0-1 1-2 2-3 3-4 5-5 4-6 6-7 6-8 7-9 8-10 9-11 +6-0 0-1 4-2 2-3 2-4 4-5 5-6 5-7 7-8 8-9 10-10 11-11 11-12 11-13 15-14 12-15 13-16 19-17 16-18 18-19 21-20 13-22 5-23 19-24 15-25 22-26 24-27 15-28 12-29 27-30 28-31 28-32 29-33 29-34 30-35 +0-0 1-1 2-2 0-3 3-4 3-5 7-6 6-7 4-8 9-10 10-11 7-12 14-13 14-14 13-15 18-16 18-17 20-18 25-19 20-20 21-21 22-22 23-23 19-24 31-25 24-26 24-27 31-28 31-29 26-30 32-31 +0-1 0-2 1-3 4-4 3-5 4-6 6-7 6-8 9-9 10-10 11-12 12-13 14-14 15-15 13-17 1-18 20-19 18-20 20-21 20-22 23-23 21-24 21-25 24-26 26-27 25-28 27-29 28-30 28-31 24-32 31-33 +2-0 4-1 +4-0 3-1 5-2 6-3 5-5 7-6 9-7 13-8 14-9 15-10 11-11 12-12 21-13 13-14 16-15 17-16 31-17 19-18 20-19 17-20 21-21 21-22 30-24 28-25 25-27 28-28 27-29 36-30 38-31 39-32 40-33 35-34 36-35 41-36 41-37 27-38 45-39 45-40 46-41 +0-0 1-1 2-2 2-3 4-4 5-5 5-6 9-7 8-8 4-9 4-10 14-11 8-12 11-13 15-14 16-15 13-16 14-17 18-18 16-19 23-20 18-21 16-22 20-23 17-24 20-25 19-26 23-27 24-28 25-29 +0-0 9-1 3-2 6-3 7-4 8-5 9-6 9-7 10-8 13-9 13-10 14-11 15-12 14-13 19-14 14-15 21-16 22-17 24-18 30-19 27-21 30-23 30-24 32-25 33-26 36-27 38-28 36-29 39-30 39-31 41-32 43-33 43-34 45-35 46-36 47-37 50-38 51-39 51-40 +0-0 0-1 0-2 2-3 3-4 5-5 5-6 4-7 8-9 8-10 8-11 16-12 17-13 11-14 9-15 12-17 15-18 13-19 15-20 10-21 15-22 17-23 18-24 +2-1 4-2 7-3 5-4 5-5 11-6 8-7 16-8 17-9 18-10 16-11 9-13 16-14 19-15 15-16 35-17 22-18 30-20 28-21 38-22 27-23 29-24 28-25 31-26 32-27 32-28 31-29 34-30 34-31 36-32 33-33 39-34 40-35 +0-0 1-1 2-2 3-3 5-4 7-5 8-6 9-7 12-8 12-9 16-10 17-11 18-12 19-13 16-14 19-15 20-16 +1-0 1-1 1-2 4-3 3-4 6-5 10-6 7-7 9-8 11-9 9-10 11-11 13-12 14-13 15-14 16-15 17-16 +0-0 1-1 1-2 2-3 2-4 3-5 4-6 4-7 6-8 7-9 8-10 9-11 8-12 9-13 +0-0 1-1 2-2 4-3 4-4 7-5 8-6 9-7 9-8 11-9 +0-0 1-1 2-2 3-3 4-4 8-6 7-7 6-8 8-9 6-11 12-12 11-13 12-14 13-15 18-16 14-17 14-18 23-19 17-20 20-21 17-24 14-25 23-26 26-27 28-28 31-29 29-30 30-31 28-32 32-33 32-34 33-35 35-36 35-37 33-38 35-39 38-40 42-41 43-42 41-43 45-44 46-45 41-46 41-47 45-48 50-49 51-50 52-51 52-52 49-53 55-54 52-55 52-57 54-58 56-60 +1-0 1-3 2-4 3-5 4-6 8-8 6-9 7-10 8-11 8-12 20-13 8-14 15-15 13-16 27-17 20-19 19-21 15-22 17-23 7-24 21-25 21-26 21-27 18-29 12-30 24-31 26-32 23-33 28-34 28-35 +0-0 2-1 4-2 2-3 4-5 8-6 7-7 9-8 9-9 9-10 10-11 11-12 11-13 14-14 15-15 14-16 16-17 17-18 +2-0 0-1 0-2 2-3 7-4 7-5 8-6 7-7 12-8 11-9 13-10 14-11 10-12 15-13 16-14 8-15 17-16 18-17 17-18 20-20 22-21 23-22 22-23 20-24 29-25 27-26 27-27 24-28 30-29 32-30 32-31 32-32 33-33 34-34 35-35 +0-0 1-1 2-2 1-3 7-4 7-5 8-6 9-7 7-8 11-9 12-10 13-11 14-12 15-13 16-14 15-15 18-16 19-17 20-18 20-19 23-20 24-21 25-22 27-23 27-24 21-25 27-26 29-27 29-28 32-29 33-30 34-31 35-32 37-33 37-34 37-35 38-36 39-37 +0-0 
2-1 2-2 2-3 4-4 2-5 4-6 5-7 5-8 6-9 9-10 10-11 11-12 13-13 14-14 15-15 16-16 17-17 22-18 17-19 22-20 24-21 24-22 22-23 24-24 27-25 +2-1 6-2 9-3 5-4 7-6 1-7 2-8 13-10 6-11 13-12 11-13 19-15 21-16 20-17 19-18 20-19 22-20 27-21 28-22 26-23 26-24 29-27 30-28 33-29 32-30 36-31 36-32 36-33 39-34 40-35 40-36 41-37 42-38 43-39 +1-0 1-1 2-2 6-3 8-4 12-5 12-6 14-7 +1-0 2-1 3-2 3-3 3-4 4-5 5-6 6-7 10-8 11-9 13-10 11-11 12-12 15-13 16-14 17-15 17-16 22-17 23-18 17-19 20-20 21-22 21-23 24-26 26-27 26-28 28-29 29-30 27-31 31-32 31-33 32-34 +0-0 1-1 2-2 3-3 4-4 3-5 3-6 3-7 7-8 1-9 9-10 8-11 8-12 9-13 11-14 13-15 12-16 16-17 12-18 16-19 14-20 15-21 16-22 18-23 18-24 19-25 20-27 +2-1 5-2 5-3 8-4 7-5 11-6 6-7 17-8 14-9 13-10 17-12 18-13 15-14 18-15 19-16 19-17 22-18 20-19 23-20 24-21 24-22 26-23 27-24 +0-0 1-1 2-2 5-3 7-4 3-5 12-6 9-7 11-8 11-10 13-11 15-12 15-13 17-14 18-15 19-16 21-17 21-18 13-19 23-20 24-21 26-22 27-23 27-24 30-25 32-26 32-27 33-28 +0-0 0-1 2-2 3-3 4-4 5-5 6-6 7-7 6-8 9-9 12-10 11-11 8-12 13-13 4-15 14-16 15-17 15-18 15-19 17-20 17-21 18-22 20-23 21-24 22-25 22-26 23-27 24-28 26-29 27-30 26-31 29-32 24-33 31-34 30-35 31-36 32-37 33-38 34-39 35-40 +0-0 1-1 5-2 5-3 6-4 7-5 9-6 10-7 11-8 12-9 14-10 15-11 16-12 17-13 17-14 20-15 19-16 21-17 24-19 25-20 26-21 27-22 29-23 30-24 +0-0 1-1 3-2 3-3 4-4 5-5 8-6 7-7 7-8 10-9 6-10 12-11 +0-0 1-1 2-2 3-3 5-4 9-5 4-6 9-7 10-8 13-9 13-10 12-11 16-12 16-13 17-14 18-15 16-16 22-17 20-18 22-19 21-20 25-21 26-22 28-23 27-24 29-25 30-27 30-28 32-29 +0-0 1-1 1-2 2-3 3-4 3-5 7-6 5-7 7-8 9-9 13-10 10-11 9-12 11-13 12-14 12-15 13-16 14-17 15-18 +0-0 1-1 2-2 3-4 4-5 9-6 8-7 7-8 14-9 9-10 14-11 17-13 16-14 13-15 20-16 23-17 24-18 25-19 26-20 22-21 18-22 27-23 30-24 32-25 33-26 34-27 34-28 37-30 +0-0 0-1 1-2 2-3 3-4 5-6 7-7 9-8 9-9 7-10 10-12 11-13 12-14 15-15 12-16 15-17 15-18 15-19 13-20 14-21 14-22 19-23 21-24 21-25 22-27 23-28 22-29 23-30 24-31 +0-0 4-1 5-2 6-3 10-4 5-5 8-6 6-7 10-8 2-9 2-10 9-11 11-12 12-13 12-14 17-15 16-16 19-17 18-18 20-19 21-20 23-22 +0-0 4-1 +0-0 1-1 3-2 3-3 7-4 4-5 8-6 4-8 8-9 11-10 14-11 13-13 16-14 16-15 17-16 17-18 18-19 20-20 22-21 22-22 23-23 24-24 23-25 26-26 27-27 +0-0 1-1 3-2 4-3 5-4 7-5 5-6 6-7 7-8 8-9 9-10 +0-0 4-1 5-2 2-3 3-4 3-5 4-6 6-7 6-8 7-9 7-10 8-11 9-13 11-14 12-15 13-16 14-17 13-18 15-19 +0-0 1-1 1-2 3-3 3-4 3-5 5-7 7-8 7-9 8-10 +0-0 1-1 2-2 2-3 3-4 4-5 +0-0 1-1 2-2 3-3 5-4 5-5 5-6 5-7 4-8 8-9 9-10 +0-0 1-1 2-2 13-3 3-4 4-5 5-6 6-7 7-8 9-9 12-11 10-12 13-13 14-14 16-15 17-16 18-17 19-18 20-19 +0-0 1-1 2-2 3-3 4-4 2-5 6-6 7-7 8-8 +0-0 1-1 0-2 3-3 4-4 4-5 5-6 +0-0 3-1 3-3 5-4 7-5 7-6 8-7 9-8 9-9 11-10 13-11 15-12 15-13 15-14 16-15 17-16 +0-0 1-1 2-2 3-3 33-5 4-6 19-7 7-8 8-9 14-10 13-11 15-12 12-13 13-14 18-15 21-16 17-17 21-18 23-19 30-20 24-21 25-22 25-23 26-24 27-25 28-26 33-27 34-28 +0-0 1-1 3-2 4-3 4-4 5-5 7-6 8-7 9-8 10-9 12-10 10-11 12-13 16-14 12-15 21-16 17-17 11-18 18-19 21-20 18-21 20-22 20-23 23-24 24-25 23-26 26-27 27-28 27-29 28-30 +3-0 2-1 2-2 1-3 3-4 6-5 7-6 8-7 9-8 19-10 12-11 17-12 6-13 16-14 12-15 16-16 10-17 21-18 +0-0 1-1 4-3 4-4 5-5 9-8 9-9 7-10 11-11 13-12 10-13 14-14 13-15 18-16 16-17 19-18 19-19 22-20 21-21 23-22 +11-0 2-1 4-2 4-3 5-4 18-5 7-6 8-7 10-8 7-9 12-10 2-11 3-12 16-13 9-14 15-15 17-16 12-17 19-18 +0-0 1-1 2-2 3-3 6-4 7-5 9-6 10-7 11-8 8-9 12-10 12-11 16-12 16-13 17-14 18-15 19-16 +0-0 1-1 5-2 3-3 7-4 6-5 7-6 8-7 8-8 9-9 11-10 12-11 14-12 15-13 +0-0 3-1 3-2 5-3 7-4 8-5 +0-0 1-1 3-2 3-3 1-4 5-5 6-6 7-7 5-8 8-9 8-10 9-11 +0-0 0-1 1-2 3-3 3-4 4-5 6-6 5-7 7-8 8-9 8-10 5-11 9-12 8-13 11-14 
12-15 9-16 10-17 10-18 12-19 13-20 +0-0 1-1 3-2 3-3 4-4 5-5 5-6 7-7 7-8 7-9 13-10 14-11 10-12 11-13 9-14 14-15 14-16 15-17 16-18 18-20 18-21 19-22 22-24 22-25 23-26 24-27 25-28 26-29 27-30 28-31 29-32 29-33 31-34 28-35 32-36 +0-0 2-1 3-2 5-3 6-4 +0-0 2-1 0-2 3-3 4-4 5-5 6-6 7-7 10-8 10-9 11-10 14-11 16-12 14-13 16-14 18-15 19-16 +0-0 0-1 1-2 1-3 3-4 5-5 7-7 6-8 10-11 16-12 11-13 11-14 13-15 15-16 15-17 17-18 17-19 18-20 +0-0 1-1 2-2 3-3 4-4 4-5 4-6 5-7 +0-0 1-1 2-2 3-3 3-4 4-5 8-6 6-7 6-8 6-9 8-10 9-11 10-12 +0-0 1-1 3-2 4-3 5-4 15-5 4-6 4-7 7-8 8-9 9-10 10-11 11-12 12-13 13-14 6-15 17-16 16-17 17-18 18-19 19-20 20-21 21-22 22-23 23-24 24-25 +0-0 1-1 2-2 4-3 4-4 6-5 +0-0 0-1 2-2 2-3 3-4 3-6 5-7 6-8 7-9 8-10 8-11 9-12 9-13 9-14 11-15 12-16 10-17 13-18 15-19 15-20 16-21 +0-0 1-1 1-2 4-3 5-4 6-5 7-6 7-7 8-8 10-9 9-10 12-11 11-12 13-13 13-14 15-15 16-16 16-17 17-18 +0-0 1-1 1-2 3-3 4-4 5-5 6-6 7-7 8-8 10-9 11-10 +0-0 4-1 1-2 3-3 3-4 4-5 6-6 10-7 7-8 11-9 13-10 14-11 13-12 15-14 18-16 19-17 19-18 21-19 22-20 24-21 24-22 25-23 +1-0 1-1 1-2 3-4 4-5 5-6 5-7 7-8 7-9 +0-0 1-1 2-2 3-3 4-4 5-5 4-6 6-7 7-8 8-9 9-10 10-11 13-12 11-13 12-14 13-15 14-16 +3-2 5-3 6-4 6-5 7-6 8-7 9-8 10-9 7-10 13-11 15-12 16-13 15-14 18-15 19-16 20-17 23-18 21-19 22-20 23-21 24-22 24-23 26-24 27-25 +0-0 1-1 7-2 3-3 4-4 4-5 5-6 7-7 8-8 9-9 8-10 14-11 10-12 17-13 18-15 15-16 15-17 18-18 18-19 19-20 +0-0 1-1 2-2 9-3 2-4 4-5 5-6 5-7 6-8 7-9 10-10 9-11 10-12 10-13 12-14 13-15 16-16 12-17 19-18 22-20 16-21 18-22 23-23 25-24 24-25 26-26 26-28 27-29 26-30 27-31 28-32 +0-0 1-1 4-2 5-3 6-4 3-5 6-6 7-7 +0-0 1-1 3-2 3-3 3-4 4-5 11-6 7-7 7-8 7-9 7-10 10-11 9-12 10-13 12-14 14-16 14-17 15-18 +0-0 4-1 5-2 6-3 4-4 3-5 9-6 8-7 11-8 12-10 12-11 15-12 16-13 17-14 18-15 19-16 27-18 21-20 21-21 30-22 31-23 32-24 33-25 35-26 40-28 38-29 41-30 43-32 36-33 34-34 39-35 42-36 45-37 45-38 45-39 +0-0 10-1 2-2 2-3 6-4 6-5 6-6 7-7 11-9 14-10 12-11 13-12 18-14 19-15 18-16 20-17 20-18 21-19 +1-0 +0-0 1-1 2-2 4-3 4-4 4-5 6-7 7-8 8-9 8-10 9-11 10-12 11-13 12-14 13-15 14-16 15-17 16-18 16-19 17-20 17-21 +0-0 2-1 3-2 4-4 5-5 8-6 10-7 11-8 8-9 10-10 6-11 12-12 16-13 11-14 14-15 15-16 13-17 16-18 18-19 18-20 22-22 23-23 24-24 25-25 26-26 27-27 28-28 29-29 29-30 31-31 32-32 35-33 27-34 35-35 36-36 37-37 41-38 41-39 39-40 40-41 14-42 41-43 42-44 43-45 44-46 43-47 42-48 43-49 49-50 50-51 52-52 53-53 47-54 51-55 52-56 54-57 +0-0 1-1 2-2 3-3 4-4 5-5 7-6 7-7 6-8 9-9 9-10 9-11 12-12 10-13 12-14 11-15 18-16 17-18 20-19 21-20 22-21 23-22 24-23 24-24 27-25 26-26 27-27 26-28 +0-0 2-1 3-2 3-3 4-4 6-5 5-6 +0-0 1-1 4-2 3-3 6-4 7-5 7-6 8-7 9-8 13-9 14-10 15-11 16-12 11-13 12-14 17-15 17-16 18-17 19-18 20-19 6-21 24-22 24-23 26-24 28-25 29-27 33-28 29-29 32-30 34-32 +0-0 4-1 1-2 2-3 4-5 7-6 7-7 7-8 7-9 9-12 8-13 14-16 12-17 14-18 14-19 16-20 19-21 20-22 17-23 18-24 25-25 26-26 25-27 23-28 26-29 27-30 20-31 27-32 32-33 30-34 38-36 25-37 35-38 34-39 34-40 34-41 43-43 40-44 37-45 42-47 43-48 44-49 43-50 45-51 46-52 44-53 45-54 47-55 +0-0 1-1 2-2 3-3 3-4 3-5 6-6 6-7 7-8 7-9 7-10 7-11 11-13 12-14 13-15 13-16 15-17 14-18 14-19 18-20 18-21 18-22 21-23 22-24 23-25 20-26 19-27 22-28 23-29 24-30 +1-0 0-1 1-2 2-3 2-4 4-5 3-6 7-7 4-8 5-9 5-10 9-11 6-12 12-13 13-14 12-15 14-16 13-17 13-18 16-19 17-20 17-21 17-22 20-23 19-24 20-25 +0-0 1-1 +1-0 1-1 1-2 2-3 1-4 4-5 1-6 6-7 9-8 9-9 8-10 12-11 10-12 11-13 12-15 12-16 16-17 16-18 12-19 18-20 19-21 20-22 21-23 23-24 24-25 22-27 26-28 29-29 34-30 27-31 32-32 30-33 31-34 35-35 29-36 20-37 34-39 38-41 43-42 44-43 42-44 44-45 42-46 47-47 48-48 +0-0 
1-1 3-2 3-3 6-4 6-5 8-6 9-7 10-8 11-9 +0-0 2-1 1-2 3-3 4-4 5-5 5-6 7-7 8-8 9-9 11-12 11-13 13-14 12-15 14-16 16-17 17-18 18-19 19-20 19-21 19-22 22-23 +0-0 1-1 1-2 2-3 3-4 5-5 4-6 7-7 6-8 8-9 8-10 9-11 +1-0 1-1 9-2 3-3 4-4 5-5 6-6 7-7 9-9 11-10 13-11 15-12 16-13 13-14 10-15 17-16 18-17 +0-0 1-1 3-2 3-3 4-4 7-5 6-6 10-7 12-8 13-9 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 7-8 9-9 10-10 +0-0 1-1 1-2 3-3 4-4 4-5 5-6 8-7 9-8 7-9 10-10 +0-0 3-1 1-2 2-3 5-4 5-5 6-6 5-7 9-8 10-9 10-10 11-11 12-12 +0-0 1-1 1-2 2-3 2-4 2-5 4-6 5-7 5-8 8-9 7-10 8-11 9-12 12-13 12-14 11-15 12-16 13-17 16-18 16-19 16-20 17-21 18-22 +0-0 1-1 2-2 5-3 6-4 4-5 5-6 8-7 7-8 9-9 8-10 7-11 12-13 11-14 11-15 12-16 3-17 15-18 17-19 16-20 18-21 17-22 18-23 +0-0 1-1 1-2 3-3 4-4 7-5 7-6 6-7 6-8 9-9 12-10 12-11 13-12 14-13 +0-0 1-1 3-2 5-3 6-4 5-5 8-6 9-7 12-8 7-9 17-11 12-12 11-13 17-14 15-15 19-17 18-18 20-19 20-20 21-21 23-22 24-23 +1-1 2-2 5-3 6-4 3-5 7-6 9-7 3-8 8-9 9-10 10-11 11-12 12-13 14-14 15-15 15-16 16-17 19-18 17-19 18-20 22-21 23-22 25-24 24-25 26-26 24-27 20-28 21-29 28-30 +0-0 1-1 0-2 12-3 12-4 5-6 6-7 7-8 8-9 9-10 10-11 5-12 6-13 14-14 16-15 16-16 14-17 18-18 19-19 21-20 19-21 21-23 24-24 23-25 25-26 26-27 26-28 +0-0 1-1 2-2 4-3 5-4 7-5 +0-0 10-1 2-2 12-3 13-4 15-5 16-6 17-7 9-8 19-9 11-10 15-11 13-12 14-13 16-14 14-15 19-16 21-17 17-18 24-19 20-20 24-21 7-22 26-23 +0-0 1-1 3-2 2-3 5-4 3-5 10-6 8-7 9-8 11-9 11-10 13-12 16-13 16-14 18-15 18-16 19-17 +1-0 1-1 2-2 3-3 3-4 6-5 5-6 6-7 8-8 7-9 8-10 8-11 8-12 10-13 10-14 12-15 +3-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 12-12 12-13 15-14 12-15 16-16 16-17 17-18 17-19 18-20 23-22 20-23 23-24 25-25 25-26 25-27 23-28 27-29 29-30 30-31 +0-0 1-1 2-2 3-3 3-4 4-5 6-6 7-7 8-8 14-9 13-10 12-11 10-12 11-13 14-14 14-15 14-16 16-17 +2-1 3-2 3-3 3-4 10-5 11-6 7-7 9-8 10-9 4-10 9-11 14-13 19-14 0-15 1-16 16-17 19-18 19-19 21-20 22-21 22-22 25-23 22-24 24-25 27-26 28-27 29-28 28-29 29-30 34-31 31-32 37-33 38-34 35-35 40-36 34-37 40-39 43-40 45-41 45-42 45-43 51-44 52-45 45-46 48-47 48-48 49-50 51-51 48-52 53-53 +0-0 1-1 4-2 4-3 5-4 5-5 7-6 8-7 7-8 9-9 6-10 11-11 13-12 12-13 14-14 +0-0 1-1 2-2 5-3 6-4 4-5 8-6 7-7 3-8 9-9 2-10 12-11 12-12 12-13 14-14 14-15 15-16 16-17 19-18 24-19 17-20 21-22 22-23 22-24 26-25 24-26 16-27 24-28 31-29 27-30 29-31 28-32 29-33 31-34 32-35 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 8-7 8-8 11-9 12-10 12-11 13-12 14-13 14-14 17-15 17-16 +0-0 1-1 4-2 4-3 5-4 3-5 7-6 7-7 6-8 10-9 11-10 12-11 14-12 14-13 15-14 +0-0 2-1 2-2 4-3 +0-0 1-1 1-2 3-3 4-4 4-5 5-6 6-7 6-8 7-9 8-10 11-11 9-12 11-13 12-14 14-16 14-17 15-18 16-19 +0-0 1-1 2-2 4-3 4-4 5-5 4-6 6-7 9-8 9-9 9-10 14-11 10-12 11-13 11-14 12-15 13-16 +0-0 1-1 2-2 4-3 5-4 6-5 6-6 9-7 10-8 12-9 11-10 +0-0 2-2 8-3 1-4 5-5 7-6 2-7 9-8 11-9 10-10 9-11 20-13 11-14 16-15 16-16 18-17 17-18 15-19 21-20 +0-0 1-1 2-2 3-3 4-4 7-5 7-6 8-7 9-8 12-9 12-10 11-11 12-12 15-13 17-14 18-15 19-16 +0-0 0-1 2-2 6-3 4-4 1-5 7-6 5-7 5-8 8-9 6-10 6-11 11-12 12-13 13-14 3-15 2-16 18-17 13-18 17-19 17-20 14-21 21-22 16-25 26-27 23-28 24-29 19-30 27-32 27-33 28-34 32-36 33-37 30-39 30-40 33-41 31-42 34-43 +0-0 1-1 2-2 3-3 4-4 6-5 6-6 8-7 7-8 9-9 12-10 12-11 13-12 14-14 15-16 14-17 15-18 16-19 21-20 20-21 21-22 21-23 20-24 23-25 23-26 25-27 26-28 27-29 28-30 28-31 26-32 26-33 31-35 32-36 +0-0 4-1 9-2 11-3 3-4 6-5 8-6 7-7 9-8 11-9 11-10 16-11 14-12 16-13 17-14 18-15 19-16 19-17 22-18 24-19 24-20 24-21 25-22 26-23 +0-0 1-1 2-2 3-3 4-4 9-5 10-6 8-7 5-8 7-9 8-10 11-11 6-12 6-13 11-14 12-15 15-16 15-17 16-18 +0-0 1-1 1-2 3-3 7-4 6-5 10-6 9-7 11-8 12-9 10-10 13-11 
11-12 16-13 17-14 18-15 19-16 18-17 20-18 +0-0 1-1 4-2 1-3 2-4 2-5 3-7 7-8 7-9 9-10 8-11 13-12 14-13 15-14 13-15 13-16 11-17 14-18 16-21 18-22 18-23 19-24 +0-0 1-1 5-4 2-5 7-6 9-7 7-8 7-9 12-10 13-11 13-12 8-14 13-15 16-16 9-17 20-18 16-19 20-20 23-21 24-22 24-23 25-24 26-25 22-26 28-27 29-28 25-29 25-30 31-31 28-33 30-34 28-35 30-36 32-37 33-38 +0-0 2-1 3-2 2-3 4-4 3-5 5-6 6-7 7-8 +0-0 1-1 2-2 2-3 7-4 4-5 5-6 6-7 8-8 11-9 9-10 5-11 12-12 13-13 14-14 15-15 15-16 16-17 17-18 +0-0 1-1 1-2 4-3 3-4 3-5 5-6 5-7 6-8 8-9 8-10 11-12 11-13 12-14 +0-0 16-1 2-2 2-3 3-4 7-5 4-6 6-7 6-8 9-9 8-10 10-11 10-12 11-13 11-14 11-15 13-16 14-17 11-18 16-19 15-20 18-22 19-23 +1-0 2-1 3-2 5-3 6-4 7-5 9-6 12-7 13-8 14-9 +0-0 3-1 1-2 2-3 12-8 11-9 8-10 9-11 7-12 11-13 12-14 6-15 9-16 14-17 8-18 16-19 17-20 19-21 19-22 20-23 20-24 21-25 +0-0 2-1 5-2 6-3 3-4 8-5 9-6 10-7 11-8 12-9 12-10 13-11 +1-0 4-1 3-2 0-3 8-4 5-5 12-6 13-7 12-8 13-9 16-10 16-11 17-12 +1-1 2-2 2-3 3-4 3-5 4-6 9-7 8-8 5-9 11-10 13-12 14-13 15-14 18-15 19-16 21-17 19-18 26-19 31-20 30-21 34-22 30-23 34-24 39-25 40-26 35-27 36-28 37-29 37-30 42-31 45-32 46-33 47-34 48-35 49-36 50-37 43-38 41-39 50-40 51-41 +3-1 4-2 0-3 6-4 7-5 8-6 9-7 10-8 8-9 11-10 13-11 14-12 15-13 16-14 18-15 18-16 20-17 21-18 19-19 25-20 14-21 21-22 13-24 26-26 29-27 29-28 29-29 34-30 31-31 32-32 38-35 35-36 36-37 37-38 39-39 42-40 41-41 42-42 +0-0 2-1 2-2 3-3 5-5 6-6 7-7 8-8 9-9 10-10 10-11 17-12 13-13 22-14 23-15 24-16 25-17 21-18 28-19 25-20 25-22 30-23 31-24 29-25 29-26 32-27 33-28 36-29 35-30 35-31 12-32 39-33 40-34 40-35 +0-0 1-1 2-2 5-3 9-4 3-5 7-6 8-7 23-9 24-10 28-11 13-12 17-13 31-14 21-15 35-16 22-17 27-19 29-20 10-22 12-23 32-24 37-26 37-27 +0-0 4-1 2-2 13-3 3-4 4-5 8-6 7-7 8-8 6-9 7-10 12-11 15-12 15-13 15-14 16-15 +0-0 1-1 1-2 3-3 4-4 5-5 4-6 9-7 8-8 9-9 9-10 11-11 12-12 13-13 14-14 +0-0 1-1 2-2 2-3 4-4 8-5 6-6 7-7 11-8 10-9 10-10 12-11 +0-0 1-1 3-2 4-4 6-6 8-7 13-8 6-9 9-10 12-11 12-12 14-13 12-14 16-15 15-16 19-17 18-18 18-19 23-20 22-21 24-23 25-24 +0-0 3-1 0-2 4-3 9-4 1-5 2-6 6-7 9-8 7-9 8-10 10-11 9-12 10-13 9-14 10-15 13-16 14-17 19-18 16-19 18-20 16-21 16-22 19-23 15-24 20-25 21-26 21-27 23-28 24-29 25-30 27-31 26-32 27-33 28-34 +0-0 1-1 2-2 1-3 5-5 7-6 6-7 8-8 11-10 9-12 8-13 13-14 15-15 13-16 18-18 17-19 19-20 19-21 18-22 20-24 24-25 25-26 23-27 27-28 26-29 29-30 28-31 28-32 25-33 30-34 30-35 34-36 34-37 35-38 31-39 35-40 36-41 +0-0 1-1 2-2 4-3 4-4 6-5 8-6 6-7 8-8 9-9 7-10 10-11 +1-0 1-1 7-2 1-3 6-4 2-5 3-6 9-7 8-8 8-9 11-10 14-11 14-12 15-13 +2-0 0-1 1-2 2-3 4-4 5-5 6-6 8-7 9-8 10-9 11-10 12-11 15-12 16-13 17-14 16-15 19-16 18-17 20-18 13-19 22-20 +0-0 1-1 1-2 3-3 6-5 7-6 8-7 8-9 11-10 12-11 14-12 15-13 16-14 17-15 18-16 19-17 17-18 25-20 21-21 24-22 25-23 26-24 27-26 32-27 33-28 34-29 29-30 33-31 35-32 38-33 32-34 37-35 38-36 +0-0 1-1 2-2 3-3 4-4 5-5 5-7 8-8 9-9 10-10 11-11 12-12 13-13 14-14 12-15 10-16 16-17 18-18 16-19 15-20 13-21 20-24 21-25 20-26 22-27 23-28 23-29 26-31 28-32 26-33 27-34 28-35 30-36 31-37 30-38 31-40 28-42 36-44 33-45 35-46 37-47 37-48 39-50 39-52 41-53 36-54 42-55 +0-0 1-1 2-2 7-3 7-4 10-5 12-6 16-7 20-8 18-9 20-10 20-11 24-12 26-13 28-14 29-15 +0-0 1-1 3-2 4-3 5-4 6-5 7-6 9-7 9-8 10-9 +0-0 1-1 4-2 4-3 7-4 8-5 9-6 10-7 12-8 15-9 16-10 17-11 14-12 19-13 20-14 +0-0 1-1 2-2 5-3 4-4 8-5 9-6 10-7 14-8 12-9 15-10 16-11 +1-0 1-2 5-3 7-5 9-6 16-7 10-8 14-9 15-10 17-11 17-12 +0-0 2-1 2-2 2-3 4-4 6-5 6-6 7-7 8-8 5-9 13-10 12-11 12-12 15-13 16-14 14-15 18-16 19-17 22-18 20-19 21-20 17-21 23-23 +0-0 4-1 3-2 3-3 4-4 5-5 7-6 8-7 11-8 10-9 13-10 
13-11 14-12 +0-0 1-1 2-2 3-3 4-4 6-5 8-6 6-7 7-8 11-9 14-11 14-12 12-13 12-14 13-15 17-16 17-17 19-18 18-19 19-20 25-21 25-22 25-23 26-25 24-26 28-27 30-28 31-29 +0-0 1-1 3-2 5-4 4-5 4-6 10-8 11-9 6-10 9-11 12-15 10-16 17-17 12-18 10-19 16-20 19-22 20-23 20-24 25-25 26-26 22-27 23-28 23-29 24-30 25-31 31-34 32-36 30-37 33-38 32-39 33-40 34-41 +0-0 1-1 2-2 3-3 4-4 6-5 5-6 7-7 8-8 12-9 14-11 14-12 15-13 17-14 16-15 19-16 19-17 20-18 21-19 +0-0 2-1 4-2 5-3 5-4 7-5 7-6 10-7 11-8 12-9 10-10 8-11 13-12 15-13 16-14 17-15 18-16 +0-0 2-1 2-2 7-3 4-4 3-5 0-6 5-7 15-8 6-10 9-11 10-12 10-14 12-15 13-16 13-17 12-18 7-19 13-20 14-21 16-22 17-23 19-24 18-25 20-27 +0-0 1-1 1-2 4-3 7-4 8-5 6-6 10-7 11-8 9-9 10-10 14-11 10-12 20-14 13-15 16-16 17-17 16-18 17-19 16-20 22-21 22-22 23-23 22-24 26-25 27-26 25-27 27-28 31-29 30-30 29-31 31-32 32-33 32-34 33-35 +0-0 1-1 2-2 1-3 4-4 5-5 2-6 6-7 7-8 8-9 8-10 11-11 14-12 11-13 13-14 13-15 10-17 15-18 12-19 17-20 17-21 19-22 19-23 20-24 +2-0 5-1 1-2 5-3 6-4 3-5 6-6 6-7 6-8 8-9 9-10 12-11 11-12 12-13 12-14 13-15 15-16 16-17 14-18 17-19 8-21 17-22 18-23 19-24 20-25 20-26 23-27 23-28 21-29 24-30 25-31 +0-0 1-1 2-2 3-3 3-4 4-5 3-6 8-7 8-8 5-9 5-10 10-11 9-12 13-13 12-14 11-15 13-16 15-17 15-18 17-19 17-20 16-21 17-22 +0-0 2-1 2-2 4-3 5-4 6-5 8-6 8-7 9-8 +0-0 3-1 2-2 3-3 3-4 4-5 5-6 6-7 7-8 8-9 8-10 9-11 10-12 11-13 +0-0 1-1 2-2 2-3 4-4 7-5 6-6 3-7 4-8 8-9 9-10 +0-0 1-1 2-2 2-3 4-5 3-6 5-7 3-8 5-9 5-10 7-11 9-12 10-13 10-14 11-15 +1-0 2-1 2-2 2-3 4-4 5-5 6-6 6-7 7-8 8-9 8-10 9-11 11-12 10-13 11-14 11-15 12-16 14-17 14-18 15-19 16-20 17-21 18-22 19-23 20-24 21-25 22-27 +0-0 1-1 2-2 3-3 5-4 4-5 6-6 5-7 8-8 9-10 10-11 8-12 15-13 11-14 13-15 13-16 14-17 15-18 17-19 18-20 19-21 17-22 19-23 20-24 +0-0 1-1 2-2 3-3 6-4 5-5 6-6 6-7 7-8 8-9 9-10 11-11 10-12 12-13 13-14 11-15 14-16 +0-0 2-2 4-3 5-4 6-5 6-6 7-7 8-8 8-9 8-10 10-11 10-12 10-13 14-14 16-15 16-16 17-17 +0-0 1-1 2-2 4-3 5-4 6-5 3-6 8-7 8-8 11-9 13-10 12-11 13-12 13-13 17-14 17-15 19-16 18-17 21-18 21-19 22-20 23-21 +0-0 1-1 5-2 5-3 8-4 10-5 14-6 13-7 14-8 18-9 10-10 17-11 21-12 20-13 23-14 24-15 +0-0 1-1 2-2 7-3 6-4 8-6 9-7 11-8 12-9 14-10 15-11 15-12 16-13 +1-0 1-1 2-2 3-3 4-4 7-5 7-6 7-7 8-8 9-9 +0-0 2-1 4-2 4-3 0-4 5-5 5-6 8-7 7-8 10-9 9-10 10-11 11-12 12-13 13-14 14-15 15-16 16-17 17-18 +0-0 1-1 2-2 7-3 3-4 5-5 7-6 14-7 8-8 8-9 11-10 7-11 11-12 10-13 14-14 13-15 15-16 16-17 +0-0 1-1 1-2 3-3 3-4 4-5 5-6 7-7 8-8 +0-0 0-1 2-2 3-3 4-4 5-5 4-6 8-7 9-8 8-9 11-10 13-11 12-12 11-13 15-14 15-15 16-16 17-17 17-18 17-19 18-20 +0-0 0-1 2-2 7-3 4-4 5-5 6-6 9-7 10-8 11-9 11-10 12-11 12-12 14-13 17-14 17-15 18-16 +0-0 1-1 2-2 6-3 4-4 7-5 8-6 8-7 7-8 10-9 11-10 15-11 16-12 12-13 16-14 13-15 17-16 +0-0 1-1 2-2 4-3 5-4 +0-0 0-1 6-2 7-3 3-4 3-5 1-6 2-7 8-9 8-11 9-12 9-13 10-14 12-15 11-16 13-17 14-18 15-19 15-20 16-21 +0-0 2-1 2-2 3-3 7-4 5-5 6-6 10-7 11-8 12-9 14-10 15-11 18-12 18-13 19-14 +0-0 2-1 7-2 1-3 3-4 4-5 6-6 5-7 6-8 8-9 8-10 8-11 12-12 13-13 11-14 12-15 13-16 16-17 15-18 17-20 20-22 21-23 24-24 24-25 25-26 23-28 26-29 +0-0 3-1 4-2 1-3 5-4 6-5 8-6 9-7 7-8 8-9 10-10 13-11 13-12 14-13 14-14 15-15 +0-0 1-1 2-2 3-3 3-4 5-5 4-6 4-7 4-8 7-9 8-10 7-11 9-12 8-13 10-14 13-15 12-16 12-17 11-18 11-19 14-20 11-21 15-22 16-23 +0-0 1-1 2-2 6-3 5-4 6-5 8-6 9-7 10-8 14-10 15-11 16-12 18-13 19-14 19-15 20-16 +0-0 1-1 5-3 6-4 4-5 4-6 6-7 10-8 9-9 9-10 9-11 12-12 14-13 6-15 16-16 16-17 14-18 24-20 19-21 21-22 22-23 21-24 18-25 20-26 25-27 +0-0 1-1 2-2 3-3 6-5 9-6 12-7 9-8 11-9 13-10 14-11 +0-0 1-1 2-2 10-3 11-4 12-5 10-7 36-8 8-9 13-10 17-11 
16-12 16-13 24-14 28-17 26-19 28-20 20-21 29-23 34-24 38-25 31-26 32-27 34-28 33-29 45-30 44-31 45-32 46-33 47-34 +0-0 0-1 0-2 2-3 3-4 +0-0 1-1 2-2 3-3 3-4 4-5 5-6 6-7 6-8 2-9 8-10 9-11 9-12 7-13 11-14 12-15 11-16 12-17 13-18 14-19 +2-0 1-1 3-2 3-3 4-4 5-5 6-6 6-7 7-8 9-9 10-10 11-11 11-12 12-13 14-14 14-15 15-16 15-17 16-18 18-19 +0-0 2-1 3-2 4-3 5-4 6-5 7-6 6-7 10-8 11-9 8-10 9-11 6-12 9-13 13-14 14-15 13-16 13-17 15-18 17-19 17-20 11-21 18-22 +0-0 1-1 2-2 3-3 5-4 6-5 7-6 10-7 10-8 11-9 12-10 12-11 14-12 16-13 16-14 17-15 19-16 21-17 22-18 21-19 24-20 25-21 +0-0 1-1 1-2 3-3 3-4 4-5 5-6 6-7 7-8 7-9 8-10 +0-0 1-1 4-2 2-3 3-4 4-5 3-6 10-7 6-8 8-9 8-10 9-11 13-12 13-13 9-14 14-15 15-16 17-17 18-19 15-21 21-22 19-24 24-25 22-26 23-27 24-28 21-29 26-30 27-31 28-32 29-33 30-34 31-35 +1-0 1-1 1-2 2-3 3-4 4-5 5-6 7-7 4-8 6-9 7-10 9-11 7-12 10-13 8-14 12-15 11-16 12-17 13-18 15-19 14-20 16-21 17-22 18-23 19-24 19-25 20-26 +0-0 1-1 1-2 3-3 4-4 8-5 8-6 12-7 10-8 2-9 13-10 14-11 16-12 14-13 17-14 18-15 17-16 22-17 23-18 22-19 24-20 25-21 26-22 27-23 +0-0 1-1 2-2 6-4 4-5 5-6 6-7 5-8 11-9 7-10 8-11 8-12 9-13 10-14 11-15 13-16 14-17 14-18 15-19 16-20 16-21 17-22 16-23 18-24 18-25 19-27 21-29 23-30 24-31 25-32 25-33 26-34 +0-0 1-1 1-2 3-3 5-5 6-6 7-7 10-8 9-9 4-10 6-11 9-12 13-13 15-14 17-15 17-16 18-17 19-18 +8-0 0-1 1-3 2-4 3-5 6-7 6-8 3-9 9-10 7-12 10-13 11-14 12-15 14-16 14-17 15-18 16-19 17-20 16-21 17-22 19-23 20-24 21-25 22-26 18-28 25-30 26-31 25-32 26-33 28-34 +0-0 1-1 2-2 3-3 5-4 3-5 7-6 7-7 12-8 11-9 8-10 10-11 14-12 12-13 13-14 16-15 13-16 13-17 21-18 22-19 19-20 17-21 24-22 21-23 21-24 26-25 23-26 27-27 26-28 28-29 30-30 31-31 32-32 33-33 34-34 35-35 35-36 36-37 37-38 36-39 38-40 +0-0 0-1 2-2 3-3 5-4 5-5 6-6 9-7 10-8 10-9 11-10 +1-0 1-1 2-2 3-3 4-4 5-5 6-6 6-7 7-8 7-9 8-10 9-11 +1-0 1-1 3-2 3-3 5-4 2-5 6-6 5-7 5-8 9-9 11-10 11-11 +0-0 1-1 3-2 3-3 8-4 9-5 6-6 6-7 7-8 5-9 8-11 16-12 14-13 10-14 15-16 15-17 24-18 12-19 21-20 20-21 21-22 22-23 23-24 28-25 26-26 31-27 29-28 32-29 31-30 35-31 36-32 +0-0 1-1 +0-0 1-1 2-2 2-3 3-4 5-5 5-6 6-7 7-8 8-9 12-10 9-11 13-12 13-13 13-14 24-15 16-16 18-17 19-18 17-19 22-20 24-21 22-22 23-23 24-24 +0-0 1-1 2-2 3-4 6-5 5-6 7-7 8-8 9-9 10-10 11-11 +0-0 1-1 8-2 4-3 5-4 9-5 7-6 9-7 11-8 12-9 13-10 14-11 15-12 16-13 +0-0 1-1 3-2 4-3 6-4 7-5 8-6 9-7 10-8 11-9 +0-0 1-1 3-3 2-4 4-5 4-6 5-7 7-8 7-9 9-10 10-11 10-12 12-13 11-14 12-15 16-16 15-18 17-19 17-20 +0-0 0-1 1-2 2-3 3-4 4-5 5-6 4-7 6-8 7-9 8-10 8-11 10-12 10-13 +0-0 1-1 6-2 3-3 7-4 7-5 8-6 6-7 6-8 11-9 10-10 12-11 16-12 10-13 29-14 18-15 26-16 20-17 30-18 23-19 23-20 25-21 21-22 22-23 29-24 28-25 30-26 31-27 +0-0 4-1 5-2 4-3 6-4 5-5 6-6 9-7 10-8 11-9 13-10 14-11 14-12 15-13 17-14 18-15 +0-0 4-1 4-2 1-3 4-4 5-5 6-6 6-7 9-8 9-9 8-10 14-11 14-12 13-13 14-14 16-15 22-16 18-18 17-19 19-20 22-21 21-22 23-23 25-24 27-25 27-26 28-27 29-28 27-29 31-30 32-31 36-32 37-33 33-34 31-35 39-37 37-38 38-39 32-40 43-41 42-42 42-43 41-44 44-45 +0-0 2-1 3-2 5-3 5-4 6-5 +2-0 1-1 2-2 3-3 4-4 5-5 6-6 6-7 11-8 8-9 11-10 12-11 13-12 13-13 15-14 15-15 16-16 +0-0 4-1 5-2 6-3 7-4 8-5 5-6 7-7 7-8 7-9 10-11 2-12 1-13 14-15 14-16 15-17 17-18 15-19 19-20 27-21 19-22 20-23 22-24 24-25 25-26 27-27 26-28 30-30 30-31 30-32 32-33 30-34 32-35 33-36 34-37 37-38 36-39 39-40 38-41 40-42 41-43 43-44 42-45 +0-0 1-1 1-2 3-4 4-5 5-7 5-8 6-9 4-10 9-11 13-13 13-14 5-15 13-16 8-17 15-18 15-19 17-20 16-21 19-22 18-23 20-24 20-25 21-26 25-28 25-29 26-30 24-31 30-32 27-33 24-34 29-35 29-36 30-37 31-38 +1-0 1-1 2-2 3-4 6-5 6-6 4-7 6-8 9-9 8-10 8-11 9-12 10-13 
11-14 12-15 +0-0 1-1 2-2 1-3 3-4 4-5 5-6 2-7 6-8 8-9 9-10 10-11 10-12 11-13 12-14 13-15 14-16 16-17 17-18 18-19 19-20 17-21 18-22 26-26 23-27 24-28 25-29 29-30 26-31 26-32 30-33 31-34 31-35 32-36 +1-0 0-1 1-2 2-3 4-4 5-5 5-6 6-7 7-8 14-9 2-10 8-11 9-12 10-13 13-14 10-15 15-16 15-17 16-18 17-19 18-20 19-21 23-22 20-23 18-24 24-25 23-26 21-27 26-28 24-29 26-30 28-31 29-32 30-33 31-34 32-35 33-36 34-37 +0-0 1-1 2-2 3-3 4-4 5-5 7-7 7-8 10-9 9-10 11-11 13-12 14-13 15-14 16-15 16-16 17-17 18-18 19-19 20-20 21-21 22-22 +0-0 1-1 2-2 3-3 3-4 4-5 6-6 7-7 9-8 12-9 11-10 11-11 14-12 13-13 15-15 16-16 17-17 +0-0 2-1 3-2 5-3 5-4 8-5 7-6 8-7 9-8 10-9 12-10 13-11 14-12 15-13 11-14 16-15 16-16 18-17 22-18 21-19 25-20 24-21 23-22 23-23 27-24 27-25 29-26 +0-0 1-1 2-2 2-3 3-4 3-5 4-6 +0-0 0-1 2-2 2-3 3-5 7-6 7-7 7-8 8-9 10-12 11-13 11-14 9-15 12-16 11-17 16-18 13-19 14-20 18-21 19-22 20-23 26-24 21-25 22-26 25-28 15-29 24-30 27-31 +2-1 3-2 5-3 3-4 4-5 4-6 8-7 7-8 10-9 9-10 10-11 12-12 12-13 13-14 14-15 17-16 15-17 16-18 17-19 18-20 18-21 20-22 20-23 21-24 +0-0 4-1 0-2 4-3 4-4 6-5 7-6 6-7 13-8 9-9 9-10 10-11 12-12 17-13 18-14 19-15 16-16 18-18 19-19 22-20 20-21 21-22 23-23 +0-0 1-1 2-2 3-3 4-4 4-5 8-6 6-7 10-8 7-9 11-10 12-11 13-12 11-13 14-14 12-15 15-17 8-19 19-20 19-21 20-22 +0-0 1-1 2-2 3-3 4-4 5-5 6-6 7-7 8-8 9-9 10-10 11-11 12-12 13-13 13-14 14-15 16-16 15-17 18-18 17-19 +0-0 1-1 2-2 3-3 2-4 4-5 5-6 6-7 7-8 8-9 7-10 9-11 9-12 10-13 +1-0 2-1 1-2 2-3 3-4 4-5 6-6 5-7 6-8 8-9 8-10 10-11 12-12 12-13 14-14 15-15 16-16 17-17 19-18 19-19 20-20 21-21 +0-0 1-1 2-2 5-3 4-4 8-5 5-6 9-7 8-8 11-9 12-10 14-11 15-12 16-13 16-14 18-15 13-16 20-17 19-18 +0-0 1-1 1-2 1-3 2-4 1-5 3-6 3-7 5-9 5-10 5-11 7-12 6-13 +0-0 1-1 1-2 4-3 7-6 6-7 11-8 4-9 13-10 13-11 14-12 11-13 12-14 19-15 19-16 22-17 23-18 22-19 24-20 26-21 25-22 +0-0 1-1 1-2 2-3 3-4 6-5 7-6 6-7 7-8 7-9 8-10 +1-1 1-2 1-3 0-4 9-5 4-6 6-7 7-8 8-9 9-10 7-11 10-12 10-13 12-14 12-15 13-16 16-18 15-19 16-20 17-21 18-22 +0-0 1-1 3-2 2-3 3-4 0-5 5-6 5-7 7-8 6-9 8-10 +0-0 1-1 1-2 2-3 3-4 5-5 3-6 6-7 6-8 6-9 9-10 11-11 9-12 5-13 6-14 10-15 12-16 13-17 +0-0 1-1 0-2 2-3 6-4 6-5 6-6 8-7 12-8 9-9 10-10 7-11 11-12 13-13 15-14 16-15 14-16 16-17 18-18 +0-0 1-1 1-2 4-3 3-4 3-5 6-7 6-8 8-9 7-10 +0-0 1-1 7-2 5-3 7-4 7-5 10-6 11-7 12-8 13-9 14-10 +1-0 1-1 1-2 2-3 3-4 5-5 6-6 5-7 6-8 7-9 +0-0 2-1 1-2 4-3 7-4 8-5 6-6 8-7 10-8 +0-0 1-1 2-2 3-3 4-6 4-7 7-8 26-9 7-10 9-12 14-14 13-15 10-16 10-17 11-18 13-19 15-20 16-21 20-22 19-23 17-24 18-25 19-26 23-28 21-29 23-30 27-32 24-33 25-34 26-35 27-36 28-37 29-38 29-39 31-40 32-41 33-42 34-43 +0-0 1-1 2-2 2-3 2-4 5-5 5-6 5-7 6-8 6-9 7-10 7-11 8-12 9-13 +0-0 2-1 5-3 6-4 7-5 4-6 3-7 8-8 8-9 9-11 10-12 11-13 8-15 9-16 14-17 15-18 16-19 17-20 14-21 17-23 17-24 19-25 20-29 21-30 21-31 22-32 23-33 +0-0 1-1 2-2 3-3 3-4 3-5 5-6 7-7 6-8 9-9 8-10 10-11 +0-0 0-1 2-2 2-3 4-4 2-5 2-6 4-7 7-8 5-9 8-10 7-11 8-12 9-13 11-14 11-15 12-16 13-17 15-18 14-19 +0-0 1-1 2-2 4-3 5-4 6-5 7-6 7-7 7-8 0-9 10-10 12-11 11-12 13-13 14-14 14-15 15-16 +0-0 2-1 3-2 +0-0 1-1 1-2 2-3 3-4 4-5 4-6 6-7 7-8 7-9 9-10 11-11 9-12 10-13 11-14 13-15 13-16 15-17 16-18 17-19 18-20 18-21 19-22 +0-0 2-1 4-2 5-3 6-4 8-5 10-6 10-7 11-8 +0-0 3-1 3-2 5-3 6-4 7-5 10-6 9-7 12-8 13-9 14-10 15-11 15-13 19-14 20-15 21-16 11-17 23-18 22-19 +0-0 1-1 3-2 4-3 2-4 5-5 6-6 7-7 9-8 10-9 11-10 +0-0 1-1 2-2 2-3 4-4 6-6 6-7 7-8 9-9 10-10 10-12 10-13 12-15 13-16 15-17 16-18 13-19 15-20 27-21 15-22 16-23 21-24 22-25 23-26 26-28 24-29 27-30 27-31 28-32 29-33 26-34 25-35 30-36 +0-0 0-1 1-2 1-3 2-4 4-6 2-7 3-8 4-9 7-10 
8-11 +0-0 1-1 1-2 1-3 3-4 4-5 5-6 6-7 6-8 7-9 8-10 8-11 9-12 6-13 12-14 10-15 13-16 +0-0 1-1 2-2 4-3 4-4 6-5 6-6 7-7 8-8 9-9 10-10 11-11 11-12 12-13 11-14 15-15 15-16 16-17 17-18 16-19 19-20 15-21 18-22 21-23 +1-0 0-1 6-2 6-3 6-4 7-5 8-6 10-7 5-8 4-9 9-10 12-11 9-12 1-13 11-14 12-15 13-16 14-17 14-18 12-19 14-20 19-21 16-22 17-23 18-24 19-25 20-26 20-27 21-28 20-29 24-30 23-31 21-32 25-33 +0-0 1-1 1-2 3-3 4-4 5-5 3-6 5-7 5-8 3-9 5-10 8-11 8-12 7-13 8-14 8-15 8-16 12-17 12-18 12-19 13-20 13-21 +2-0 0-1 4-2 5-3 2-4 6-5 6-6 8-7 9-8 4-9 13-10 12-11 13-12 14-13 17-14 15-15 16-16 18-17 18-18 14-19 17-20 24-21 26-22 24-23 22-24 28-25 25-26 21-27 25-28 28-29 36-30 30-31 26-32 28-33 32-34 32-35 32-36 34-37 35-38 36-39 32-40 41-42 38-43 38-44 41-45 42-48 42-49 43-50 45-51 45-52 46-53 47-54 49-55 +0-0 1-1 3-2 4-3 5-4 3-5 7-6 8-7 11-8 9-9 10-10 12-11 12-12 13-13 13-14 14-15 16-16 15-18 17-19 17-20 17-21 19-22 21-23 23-24 21-25 24-26 +0-0 1-1 4-2 2-3 5-4 6-5 6-6 7-7 10-8 8-9 11-10 12-11 12-12 10-13 9-14 14-15 13-16 16-17 13-18 17-19 18-20 19-21 20-22 21-23 22-24 +0-0 1-1 3-2 4-3 2-4 6-5 4-6 3-7 9-8 6-9 7-12 10-13 9-14 10-15 10-16 11-17 12-18 +0-0 1-1 2-2 1-3 1-4 3-5 3-6 4-7 3-8 4-9 4-10 6-11 6-12 7-13 7-14 8-15 +0-0 1-1 1-2 3-3 4-4 5-5 3-6 5-7 5-8 3-9 5-10 8-11 8-12 7-13 8-14 8-15 8-16 11-17 11-18 11-19 12-20 12-21 13-22 14-23 15-24
diff --git a/data/vocab-train.src b/data/vocab-train.src
new file mode 100644
index 0000000000..ea22a58bdc
--- /dev/null
+++ b/data/vocab-train.src
@@ -0,0 +1,11868 @@
+the 3757 +, 2902 +. 2899 +of 1974 +and 1734 +to 1711 +in 1226 +a 1086 +is 948 +that 706 +for 665 +are 490 +on 477 +with 466 +I 436 +The 426 +be 413 +this 384 +have 357 +we 351 +as 347 +by 336 +not 327 +it 304 +you 277 +will 259 +which 247 +at 241 +from 225 +an 222 +has 210 +- 206 +or 205 +can 201 +" 187 +our 180 +all 176 +was 169 +We 169 +also 167 +European 155 +This 151 +'s 149 +but 140 +Mr 140 +your 137 +been 133 +" 133 +would 133 +their 133 +It 129 +In 128 +its 126 +should 123 +more 115 +Commission 109 +do 108 +one 108 +about 103 +President 103 +there 103 +very 102 +new 100 +? 97 +s 96 +like 95 +must 91 +only 90 +time 90 +they 89 +countries 84 +who 83 +many 81 +were 80 +so 77 +up 76 +my 75 +' 75 +these 75 +Europe 74 +important 74 +other 74 +such 73 +first 73 +some 73 +people 73 +if 72 +out 72 +any 70 +States 70 +what 68 +Member 68 +use 68 +now 68 +made 64 +Council 64 +report 64 +us 64 +/ 64 +need 62 +make 61 +! 60 +into 60 +support 59 +take 59 +no 59 +Union 57 +his 57 +EU 57 +possible 57 +than 56 +Parliament 55 +work 54 +information 53 +...
53 +just 52 +when 52 +between 52 +two 51 +most 51 +years 51 +well 51 +being 50 +system 50 +them 49 +A 49 +am 49 +way 48 +over 48 +hotel 48 +then 46 +political 46 +order 46 +market 45 +There 45 +offer 45 +policy 44 +even 44 +If 44 +different 44 +As 44 +could 44 +had 43 +after 43 +he 41 +here 41 +within 41 +development 41 +because 40 +place 40 +offers 40 +therefore 39 +know 39 +those 39 +both 39 +me 38 +much 38 +good 38 +last 38 +may 38 +area 38 +But 38 +rights 38 +see 37 +right 37 +part 37 +used 37 +For 37 +Hotel 37 +where 36 +All 36 +You 36 +help 35 +does 35 +debate 35 +great 35 +today 35 +say 34 +set 34 +case 34 +products 34 +small 34 +own 34 +year 33 +find 33 +That 33 +economic 33 +want 32 +able 32 +rooms 32 +particular 32 +citizens 32 +number 32 +think 31 +problems 31 +still 31 +human 31 +available 31 +before 30 +clear 30 +without 30 +free 30 +provide 30 +issue 30 +each 30 +cannot 30 +public 30 +On 29 +against 29 +same 29 +too 29 +future 29 +fact 29 +course 29 +group 29 +under 29 +through 29 +subject 29 +means 28 +problem 28 +page 28 +country 28 +international 28 +believe 28 +women 28 +since 28 +special 27 +while 27 +Commissioner 27 +again 27 +services 27 +how 26 +necessary 26 +However 26 +energy 26 +situation 26 +get 26 +And 26 +At 26 +high 26 +change 26 +business 26 +production 25 +favour 25 +children 25 +give 25 +point 25 +company 25 +long 25 +large 25 +world 25 +view 25 +wish 25 +including 25 +Treaty 25 +Our 25 +national 24 +common 24 +process 24 +during 24 +access 24 +quality 24 +main 24 +current 24 +few 24 +third 24 +food 24 +around 24 +said 24 +social 24 +proposal 24 +day 24 +control 24 +legal 24 +vote 24 +1 24 +per 23 +cooperation 23 +financial 23 +areas 23 +Mrs 23 +room 23 +always 23 +shall 23 +close 23 +certainly 23 +ensure 23 +House 23 +rapporteur 22 +put 22 +agreement 22 +He 22 +de 22 +protection 22 +question 22 +towards 22 +further 22 +among 22 +companies 22 +city 22 +service 22 +second 22 +example 22 +What 22 +issues 21 +three 21 +local 21 +end 21 +data 21 +level 21 +2 21 +however 21 +already 21 +money 21 +visit 21 +experience 21 +following 21 +Group 20 +personal 20 +name 20 +hope 20 +better 20 +serious 20 +off 20 +These 20 +fully 20 +seems 20 +management 20 +stay 20 +needs 20 +They 19 +itself 19 +given 19 +life 19 +trade 19 +far 19 +excellent 19 +least 19 +security 19 +Madam 19 +why 19 +position 19 +With 19 +full 19 +open 19 +conditions 19 +another 19 +welcome 19 +particularly 19 +soon 19 +working 19 +10 19 +resolution 19 +certain 19 +responsible 18 +left 18 +next 18 +best 18 +beautiful 18 +product 18 +United 18 +form 18 +something 18 +measures 18 +start 18 +list 18 +family 18 +industry 18 +once 18 +account 18 +transport 18 +3 18 +every 18 +sector 18 +real 18 +together 18 +2009 17 +related 17 +line 17 +often 17 +voted 17 +accept 17 +responsibility 17 +recent 17 +government 17 +centre 17 +So 17 +using 17 +2006 17 +various 17 +One 17 +regard 17 +Members 17 +value 17 +thanks 17 +building 17 +done 17 +yet 17 +look 16 +call 16 +basis 16 +really 16 +things 16 +power 16 +regarding 16 +him 16 +When 16 +safety 16 +less 16 +law 16 +whether 16 +amendments 16 +extremely 16 +risk 16 +& 16 +guarantee 16 +4 16 +increasing 16 +Lisbon 16 +states 15 +days 15 +Committee 15 +user 15 +standard 15 +jobs 15 +Community 15 +opportunity 15 +increase 15 +completely 15 +famous 15 +several 15 +pages 15 +result 15 +global 15 +taken 15 +thank 15 +interest 15 +No 15 +contact 15 +found 15 +did 15 +ask 15 +projects 15 +activities 15 +especially 15 +state 15 +members 15 +little 15 +programme 15 +final 15 
+bring 15 +Presidency 15 +directive 15 +themselves 15 +water 15 +idea 15 +until 15 +her 15 +equipped 15 +become 15 +continue 15 +whose 14 +down 14 +ladies 14 +gentlemen 14 +step 14 +To 14 +growth 14 +developing 14 +importance 14 +opinion 14 +making 14 +present 14 +restaurant 14 +15 14 +included 14 +simply 14 +elements 14 +version 14 +location 14 +standards 14 +provided 14 +simple 14 +difficult 14 +World 14 +crisis 14 +near 14 +everything 14 +total 14 +private 14 +attention 14 +allow 14 +approach 14 +America 14 +website 14 +policies 14 +institutions 14 +might 14 +technical 14 +created 14 +almost 14 +strategy 14 +technology 14 +million 14 +games 14 +provisions 13 +guests 13 +design 13 +peace 13 +include 13 +single 13 +former 13 +going 13 +period 13 +price 13 +cultural 13 +Finally 13 +death 13 +agree 13 +From 13 +terms 13 +dialogue 13 +climate 13 +cases 13 +person 13 +show 13 +come 13 +online 13 +German 13 +along 13 +procedure 13 +aspects 13 +key 13 +internal 13 +environment 13 +software 13 +allows 13 +effective 13 +facilities 13 +station 13 +heart 13 +successful 13 +markets 13 +located 13 +role 13 +rules 13 +focus 13 +quickly 13 +sea 13 +5 13 +follow 13 +includes 13 +range 13 +budget 12 +create 12 +negotiations 12 +throughout 12 +authorities 12 +customers 12 +comments 12 +although 12 +systems 12 +decision 12 +concerned 12 +care 12 +type 12 +living 12 +light 12 +requirements 12 +Amendment 12 +changes 12 +respect 12 +events 12 +hand 12 +Central 12 +Let 12 +interests 12 +known 12 +State 12 +go 12 +whole 12 +community 12 +individual 12 +listed 12 +'t 12 +adopted 12 +specific 12 +thus 12 +modern 12 +short 12 +makes 12 +town 12 +village 12 +An 12 +popular 12 +legislation 12 +tax 12 +space 12 +search 12 +deal 12 +active 12 +questions 12 +called 12 +October 12 +general 12 +network 12 +forward 12 +holiday 12 +reach 12 +greater 12 +wide 12 +live 12 +house 12 +ideal 12 +Open 12 +pressure 12 +Please 12 +Since 12 +breakfast 12 +China 12 +integration 12 +content 11 +principle 11 +action 11 +item 11 +tools 11 +solution 11 +established 11 +doing 11 +health 11 +region 11 +based 11 +12 11 +air 11 +nothing 11 +enjoy 11 +double 11 +reason 11 +essential 11 +resources 11 +above 11 +variety 11 +keep 11 +rate 11 +response 11 +myself 11 +required 11 +How 11 +old 11 +thought 11 +competition 11 +member 11 +initiative 11 +26 11 +[ 11 +easy 11 +businesses 11 +men 11 +concerning 11 +nature 11 +effect 11 +lot 11 +young 11 +center 11 +major 11 +needed 11 +goods 11 +away 11 +achieved 11 +keyword 11 +below 11 +0 11 +protect 11 +across 11 +please 11 +improve 11 +back 11 +times 11 +Is 11 +positive 11 +enough 11 +US 11 +directly 11 +Court 11 +words 11 +By 11 +address 11 +involved 11 +objective 11 +though 11 +| 11 +May 11 +presented 11 +media 11 +Affairs 10 +progress 10 +June 10 +Some 10 +became 10 +developed 10 +2007 10 +quite 10 +assistance 10 +relevant 10 +sports 10 +East 10 +web 10 +field 10 +others 10 +led 10 +results 10 +fundamental 10 +French 10 +la 10 +shown 10 +held 10 +promote 10 +remains 10 +details 10 +aware 10 +solutions 10 +appropriate 10 +programmes 10 +possibility 10 +rise 10 +30 10 +Government 10 +comes 10 +2010 10 +impact 10 +agreements 10 +minutes 10 +additional 10 +section 10 +November 10 +staff 10 +2008 10 +] 10 +professional 10 +recently 10 +job 10 +First 10 +Internet 10 +project 10 +night 10 +concerns 10 +selected 10 +rates 10 +either 10 +ski 10 +More 10 +tomorrow 10 +huge 10 +capital 10 +After 10 +study 10 +complete 10 +everyone 10 +Turkey 10 +New 10 +guaranteed 10 +Russia 10 +During 10 +bad 10 +matter 10 
+ago 10 +International 10 +Berlin 10 +mainly 10 +worldwide 10 +creation 10 +violence 10 +stability 10 +society 10 +amount 10 +groups 10 +EUR 10 +allowed 10 +lines 10 +objectives 10 +according 10 +funds 9 +program 9 +apply 9 +framework 9 +having 9 +environmental 9 +speed 9 +France 9 +distance 9 +compromise 9 +workers 9 +probably 9 +kind 9 +reform 9 +reduce 9 +digital 9 +shopping 9 +electronic 9 +Middle 9 +designed 9 +born 9 +increased 9 +run 9 +shower 9 +read 9 +currently 9 +operating 9 +concern 9 +act 9 +taking 9 +application 9 +force 9 +charge 9 +understand 9 +history 9 +addition 9 +National 9 +whom 9 +education 9 +walking 9 +restaurants 9 +training 9 +meeting 9 +Today 9 +submitted 9 +produced 9 +coming 9 +text 9 +beauty 9 +applications 9 +defence 9 +8 9 +floor 9 +true 9 +heard 9 +package 9 +safe 9 +technologies 9 +situated 9 +shows 9 +Convention 9 +m 9 +Due 9 +memory 9 +American 9 +moment 9 +Germany 9 +2000 9 +hours 9 +central 9 +event 9 +check 9 +activity 9 +sustainable 9 +procedures 9 +refer 9 +points 9 +behind 9 +materials 9 +provides 9 +mind 9 +comfortable 9 +committed 9 +proposed 9 +term 9 +original 9 +past 9 +strategic 9 +facing 9 +supported 9 +round 9 +historical 9 +extra 9 +lead 9 +relations 9 +conference 9 +advertising 9 +Enjoy 9 +substances 9 +implementation 9 +My 9 +consumers 9 +Chamber 9 +di 9 +feel 9 +came 8 +site 8 +establish 8 +try 8 +efficient 8 +date 8 +organisations 8 +finally 8 +speaking 8 +11 8 +_ 8 +culture 8 +writing 8 +four 8 +actually 8 +instruments 8 +improving 8 +indeed 8 +stop 8 +primarily 8 +internet 8 +prior 8 +regional 8 +decisions 8 +regions 8 +met 8 +considered 8 +scope 8 +prices 8 +press 8 +implement 8 +side 8 +camera 8 +rather 8 +25 8 +sites 8 +began 8 +behalf 8 +Only 8 +aim 8 +Ministers 8 +Bank 8 +paid 8 +bed 8 +sectors 8 +largest 8 +regulation 8 +hot 8 +beginning 8 +union 8 +ones 8 +remain 8 +civil 8 +Therefore 8 +prevent 8 +places 8 +photos 8 +Information 8 +treatment 8 +function 8 +discrimination 8 +quiet 8 +communication 8 +Thank 8 +spend 8 +opportunities 8 +customer 8 +team 8 +century 8 +significant 8 +plans 8 +tasks 8 +carried 8 +atmosphere 8 +happy 8 +Your 8 +acceptance 8 +ever 8 +child 8 +practice 8 +Of 8 +views 8 +statement 8 +message 8 +reducing 8 +received 8 +Rules 8 +apartment 8 +parties 8 +leading 8 +pay 8 +files 8 +Now 8 +Rights 8 +solidarity 8 +notice 8 +proposals 8 +says 8 +require 8 +promoting 8 +Paris 8 +contains 8 +sale 8 +congratulate 8 +longer 8 +16 8 +equally 8 +� 8 +VAT 8 +Conference 8 +analysis 8 +del 8 +forced 8 +built 8 +West 8 +looking 8 +fair 8 +Constitution 8 +existing 8 +economy 8 +PHP 8 +Federal 7 +colleagues 7 +himself 7 +draft 7 +fisheries 7 +processing 7 +works 7 +ways 7 +body 7 +September 7 +migration 7 +prepared 7 +face 7 +motor 7 +exit 7 +months 7 +fine 7 +document 7 +seen 7 +targets 7 +consider 7 +appeal 7 +Spanish 7 +plan 7 +winter 7 +mission 7 +goal 7 +connection 7 +sound 7 +rich 7 +challenge 7 +meet 7 +definition 7 +Just 7 +perfect 7 +travel 7 +links 7 +hotels 7 +seriously 7 +upon 7 +surrounding 7 +waste 7 +capable 7 +tabled 7 +card 7 +closed 7 +consequences 7 +freedom 7 +democracy 7 +borders 7 +sure 7 +employment 7 +limit 7 +basic 7 +car 7 +carbon 7 +granted 7 +fishing 7 +cause 7 +50 7 +independent 7 +research 7 +friendly 7 +movement 7 +reduced 7 +knowledge 7 +administration 7 +aid 7 +adopt 7 ++ 7 +takes 7 +becoming 7 +offering 7 +reading 7 +tourism 7 +gas 7 +supply 7 +Mediterranean 7 +billion 7 +effects 7 +tests 7 +due 7 +improved 7 +7 7 +9 7 +doubt 7 +regulations 7 +visited 7 +sense 7 +style 7 +comfort 7 
+creates 7 +mountain 7 +broad 7 +kitchen 7 +age 7 +natural 7 +Guests 7 +Welcome 7 +pleased 7 +sides 7 +avoid 7 +railway 7 +effort 7 +ten 7 +exchange 7 +discussion 7 +link 7 +dangerous 7 +agenda 7 +practical 7 +Each 7 +mentioned 7 +draw 7 +department 7 +Exchange 7 +® 7 +written 7 +green 7 +voting 7 +brings 7 +island 7 +select 7 +players 7 +latest 7 +larger 7 +outstanding 7 +Charter 7 +availability 7 +rapidly 7 +potential 7 +success 7 +credit 7 +buildings 7 +code 7 +offered 7 +reforms 7 +Swiss 7 +entire 7 +street 7 +took 7 +clearly 7 +aimed 7 +elsewhere 7 +likely 7 +forces 7 +Air 7 +qualified 7 +tool 7 +buffet 7 +Park 7 +extensive 7 +species 7 +governments 7 +20 7 +feature 7 +exhibition 7 +rental 7 +receive 7 +parking 7 +democratic 7 +land 7 +film 7 +immediately 7 +reviews 7 +Moreover 7 +founded 7 +charged 7 +competitiveness 7 +share 7 +low 7 +terrace 7 +methods 7 +implementing 6 +Two 6 +economies 6 +military 6 +begin 6 +presentation 6 +image 6 +processes 6 +book 6 +19 6 +joint 6 +reached 6 +Czech 6 +illegal 6 +paper 6 +chosen 6 +critical 6 +turn 6 +18 6 +girls 6 +separate 6 +apartments 6 +Austrian 6 +purposes 6 +penalty 6 +Act 6 +Switzerland 6 +perhaps 6 +Austria 6 +60 6 +happen 6 +big 6 +talk 6 +worth 6 +equal 6 +develop 6 +menu 6 +options 6 +features 6 +Unfortunately 6 +Software 6 +raise 6 +applied 6 +deliver 6 +cut 6 +giving 6 +creating 6 +confidence 6 +amongst 6 +table 6 +t 6 +properly 6 +regular 6 +components 6 +picture 6 +students 6 +mechanism 6 +path 6 +music 6 +banks 6 +km 6 +River 6 +asked 6 +reasons 6 +March 6 +Japan 6 +Minutes 6 +agreed 6 +war 6 +December 6 +represent 6 +challenges 6 +motion 6 +majority 6 +exclusively 6 +constant 6 +Apart 6 +highly 6 +significance 6 +maintained 6 +priority 6 +obtain 6 +St. 6 +anything 6 +networks 6 +external 6 +mode 6 +head 6 +bathroom 6 +advice 6 +relating 6 +numerous 6 +went 6 +provision 6 +continued 6 +context 6 +lasting 6 +Strategy 6 +highest 6 +waiting 6 +Minister 6 +financing 6 +commercial 6 +traditional 6 +furnished 6 +summer 6 +walk 6 +principles 6 +advantage 6 +parallel 6 +operation 6 +sun 6 +smaller 6 +packages 6 +choice 6 +approximately 6 +introduction 6 +requests 6 +recognition 6 +alone 6 +politics 6 +failure 6 +Although 6 +27 6 +structural 6 +art 6 +cost 6 +income 6 +allowing 6 +choose 6 +sport 6 +warm 6 +matters 6 +swimming 6 +river 6 +pass 6 +talking 6 +correct 6 +17 6 +Service 6 +reports 6 +status 6 +stock 6 +locations 6 +exist 6 +thing 6 +$ 6 +amendment 6 +Nevertheless 6 +carry 6 +early 6 +expected 6 +contract 6 +delivered 6 +board 6 +accommodation 6 +6 6 +committee 6 +train 6 +trying 6 +Africa 6 +ideas 6 +serves 6 +dynamic 6 +plant 6 +home 6 +agriculture 6 +week 6 +opened 6 +Ireland 6 +instance 6 +accordance 6 +Every 6 +achieve 6 +strong 6 +school 6 +speech 6 +B 6 +partner 6 +emissions 6 +lives 6 +yesterday 6 +send 6 +items 6 +review 6 +email 6 +July 6 +AG 6 +annual 6 +registration 6 +construction 6 +investment 6 +engine 6 +euro 6 +remove 6 +enter 6 +approved 6 +convinced 6 +party 6 +Barcelona 6 +caused 6 +lost 6 +benefits 6 +surrounded 6 +damage 6 +trading 6 +b 6 +origin 6 +she 6 +limited 6 +return 6 +families 6 +c 6 +computer 6 +lack 6 +inspired 6 +buy 6 +museums 6 +Article 6 +medicines 6 +unfortunately 6 +relaxing 6 +let 6 +€ 6 +guest 6 +fish 6 +rule 6 +sales 6 +never 6 +sitting 6 +affected 6 +efforts 6 +extent 5 +advance 5 +Federation 5 +capacity 5 +displayed 5 +representatives 5 +2001 5 +enlargement 5 +appears 5 +modified 5 +Linux 5 +terrorist 5 +Secondly 5 +transfer 5 +minimum 5 +CAP 5 +Vienna 5 +signal 5 +reduction 5 
+fight 5 +maintain 5 +taxes 5 +St 5 +'re 5 +skiing 5 +file 5 +extended 5 +absolutely 5 +* 5 +via 5 +installed 5 +coordination 5 +informed 5 +peoples 5 +naturally 5 +scheme 5 +seeking 5 +consensus 5 +carefully 5 +officials 5 +devices 5 +pollution 5 +strict 5 +track 5 +teams 5 +Social 5 +Nations 5 +move 5 +candidate 5 +luxury 5 +operational 5 +absence 5 +lies 5 +Spain 5 +artists 5 +integrated 5 +sign 5 +language 5 +multiple 5 +Grand 5 +saying 5 +request 5 +note 5 +default 5 +asking 5 +farming 5 +X 5 +border 5 +Palace 5 +City 5 +costs 5 +signed 5 +engage 5 +monetary 5 +Democrats 5 +selection 5 +zero 5 +Windows 5 +renovated 5 +meat 5 +accepted 5 +pieces 5 +character 5 +14 5 +13 5 +display 5 +fall 5 +corruption 5 +increasingly 5 +protected 5 +Western 5 +Nos 5 +24 5 +method 5 +wood 5 +While 5 +test 5 +story 5 +calls 5 +identity 5 +persons 5 +2005 5 +Tip: 5 +Find 5 +Page 5 +combined 5 +break 5 +parents 5 +prepare 5 +List 5 +road 5 +brought 5 +Brussels 5 +staying 5 +victims 5 +British 5 +nice 5 +King 5 +January 5 +languages 5 +reservation 5 +ad 5 +hoc 5 +player 5 +followed 5 +York 5 +manner 5 +Are 5 +unique 5 +(PL) 5 +precisely 5 +poorest 5 +experiences 5 +someone 5 +generally 5 +getting 5 +stage 5 +bound 5 +efficiency 5 +surface 5 +agricultural 5 +beds 5 +maximum 5 +levels 5 +revision 5 +enable 5 +download 5 +keeping 5 +intention 5 +marketing 5 +forms 5 +urgent 5 +earlier 5 +bear 5 +accession 5 +parts 5 +migrants 5 +southern 5 +discussed 5 +flight 5 +television 5 +pools 5 +gardens 5 +Click 5 +thumbnail 5 +photo 5 +Web 5 +Those 5 +clarity 5 +protecting 5 +operators 5 +comprehensive 5 +calling 5 +affect 5 +applies 5 +Rule 5 +percent 5 +fun 5 +Fundamental 5 +enterprises 5 +roads 5 +Most 5 +constantly 5 +administrative 5 +minister 5 +model 5 +goes 5 +innovative 5 +published 5 +conflict 5 +sets 5 +industrial 5 +thinking 5 +™ 5 +responding 5 +fear 5 +growing 5 +initial 5 +monitor 5 +direct 5 +gives 5 +added 5 +phone 5 +newly 5 +decided 5 +fruit 5 +users 5 +professionals 5 +moving 5 +partnership 5 +signs 5 +De 5 +spending 5 +represented 5 +Day 5 +mention 5 +five 5 +namely 5 +binding 5 +purchase 5 +recovery 5 +100 5 +insurance 5 +introduce 5 +join 5 +game 5 +Costa 5 +techniques 5 +fresh 5 +d 5 +cars 5 +previous 5 +stated 5 +judicial 5 +demand 5 +higher 5 +official 5 +votes 5 +contribution 5 +continuing 5 +diseases 5 +helps 5 +Restaurant 5 +attempts 5 +solve 5 +budgetary 5 +sold 5 +automatic 5 +saw 5 +understanding 5 +playing 5 +patients 5 +versions 5 +{ 5 +interface 5 +Rooms 5 +requires 5 +changing 5 +argued 5 +attractive 5 +conclude 5 +Do 5 +recommendations 5 +benefit 5 +valley 5 +South 5 +cities 5 +encourage 5 +His 5 +weather 5 +Fund 5 +interesting 5 +elderly 5 +suit 5 +2003 5 +mechanisms 5 +e 5 +errors 5 +White 5 +starting 5 +difficulties 5 +a.m. 
5 +transparency 5 +Monetary 5 +module 5 +respond 5 +permanent 5 +Thanks 5 +purpose 4 +refuse 4 +goals 4 +beyond 4 +Security 4 +Policy 4 +mountains 4 +transparent 4 +countryside 4 +200 4 +everyday 4 +none 4 +rest 4 +organisation 4 +actions 4 +moreover 4 +fairs 4 +pain 4 +Yet 4 +34 4 +informing 4 +utmost 4 +define 4 +attack 4 +Europeans 4 +viewed 4 +ACP 4 +Street 4 +employees 4 +payment 4 +trust 4 +virtual 4 +adequate 4 +farmers 4 +battle 4 +ability 4 +happens 4 +love 4 +speak 4 +arms 4 +instead 4 +crucial 4 +outset 4 +leader 4 +installation 4 +bar 4 +crime 4 +museum 4 +dedicated 4 +later 4 +cooking 4 +corner 4 +providing 4 +enables 4 +box 4 +tested 4 +arrival 4 +theory 4 +execute 4 +demands 4 +IBM 4 +duty 4 +(The 4 +producers 4 +disposal 4 +hole 4 +marks 4 +lobby 4 +Another 4 +proven 4 +meaning 4 +meetings 4 +client 4 +movements 4 +corporate 4 +runs 4 +base 4 +Code 4 +blame 4 +unfortunate 4 +boat 4 +Polish 4 +disaster 4 +instrument 4 +campaign 4 +historic 4 +Regulation 4 +Procedure 4 +core 4 +sell 4 +half 4 +weight 4 +Yesterday 4 +Canada 4 +conduct 4 +contemporary 4 +sentenced 4 +prison 4 +obtained 4 +specifically 4 +audio 4 +map 4 +gained 4 +adoption 4 +keys 4 +pleasure 4 +frequency 4 +loss 4 +Croatia 4 +failed 4 +exercise 4 +pure 4 +actively 4 +target 4 +millions 4 +radio 4 +NATO 4 +perform 4 +lower 4 +400 4 +expenditure 4 +explain 4 +Alliance 4 +Club 4 +enjoyed 4 +length 4 +honourable 4 +brief 4 +express 4 +balcony 4 +Bulgaria 4 +holding 4 +governance 4 +successfully 4 +aviation 4 +Obama 4 +excess 4 +Pakistan 4 +sensitive 4 +Prime 4 +tell 4 +enjoying 4 +sporting 4 +employed 4 +values 4 +domestic 4 +disabled 4 +minor 4 +nearby 4 +4-star 4 +privacy 4 +00 4 +diversity 4 +leaders 4 +front 4 +'ll 4 +desire 4 +format 4 +partners 4 +steel 4 +beach 4 +shops 4 +oil 4 +office 4 +Use 4 +forget 4 +willing 4 +vessels 4 +possibly 4 +covered 4 +depending 4 +remember 4 +University 4 +friends 4 +authority 4 +alternative 4 +ready 4 +types 4 +click 4 +chance 4 +transitional 4 +possibilities 4 +innovation 4 +responsibilities 4 +fields 4 +overview 4 +TV 4 +candidates 4 +fees 4 +Maastricht 4 +don 4 +Make 4 +correctly 4 +appreciate 4 +poverty 4 +Prodi 4 +else 4 +release 4 +manufacturing 4 +district 4 +Stock 4 +Terms 4 +governing 4 +Even 4 +experts 4 +subsidies 4 +² 4 +debating 4 +balance 4 +payments 4 +Russian 4 +Here 4 +bazaar 4 +mobility 4 +consistent 4 +top 4 +beer 4 +providers 4 +(DE) 4 +terrorism 4 +neither 4 +nuclear 4 +weapons 4 +closer 4 +class 4 +Northern 4 +seat 4 +etc 4 +High 4 +uncertainty 4 +helpful 4 +poor 4 +wrong 4 +delegation 4 +crystal 4 +vision 4 +industries 4 +R 4 +played 4 +investors 4 +label 4 +Sony 4 +Annex 4 +prosperity 4 +similar 4 +six 4 +pursuant 4 +40 4 +Auditors 4 +Frankfurt 4 +elected 4 +wild 4 +host 4 +February 4 +blocks 4 +General 4 +participation 4 +initiatives 4 +Not 4 +delegations 4 +nevertheless 4 +North 4 +answer 4 +guarantees 4 +serve 4 +flat 4 +session 4 +imagine 4 +Before 4 +temporary 4 +plants 4 +sanctions 4 +joined 4 +aims 4 +setting 4 +Station 4 +steps 4 +resource 4 +died 4 +Through 4 +participants 4 +combination 4 +restrictions 4 +clarify 4 +Human 4 +delay 4 +manufacturer 4 +favourable 4 +intended 4 +journalists 4 +widespread 4 +outdoor 4 +thermal 4 +rapid 4 +prisoners 4 +performed 4 +tobacco 4 +restored 4 +fifth 4 +seem 4 +assured 4 +1999 4 +eliminate 4 +play 4 +database 4 +extreme 4 +seller 4 +police 4 +symbols 4 +convenient 4 +copies 4 +Technical 4 +Generally 4 +color 4 +presence 4 +London 4 +repeated 4 +beaches 4 +clients 4 +Development 4 +threat 4 +Financial 4 
+prominent 4 +secure 4 +S 4 +machine 4 +HTML 4 +detailed 4 +Friday 4 +proceedings 4 +faced 4 +operates 4 +infrastructure 4 +MEPs 4 +generation 4 +300 4 +containing 4 +impressive 4 +delighted 4 +Intergovernmental 4 +globalisation 4 +route 4 +intend 4 +organization 4 +cycle 4 +entered 4 +gave 4 +material 4 +daily 4 +explained 4 +dealing 4 +GmbH 4 +cheap 4 +introduced 4 +IT 4 +defined 4 +Belgium 4 +arrive 4 +superpower 4 +28 4 +Poland 4 +conclusion 4 +Republic 4 +disease 4 +earth 4 +elegant 4 +T 4 +physical 4 +Airport 4 +obviously 4 +Justice 4 +morning 4 +firm 4 +sufficient 4 +build 4 +heritage 4 +pointed 4 +injection 4 +suggestion 4 +relation 4 +wants 4 +contribute 4 +bedroom 4 +obliged 4 +Green 4 +1992 4 +religious 4 +circumstances 4 +D 4 +inclusion 4 +finance 4 +monitoring 4 +legacy 4 +bus 4 +equality 4 +valid 4 +laws 4 +hearing 4 +institution 4 +focused 4 +expensive 4 +putting 4 +task 4 +operate 4 +commitments 4 +She 4 +screen 4 +overcome 4 +weekend 4 +uses 4 +honour 4 +' 4 +View 4 +75% 4 +modules 4 +criteria 4 +recognised 4 +applying 4 +Stability 4 +add 4 +heavy 4 +learn 4 +complex 4 +causing 4 +late 4 +trees 4 +individuals 4 +produce 4 +completed 4 +Gaza 4 +April 4 +metres 4 +underway 4 +valuable 4 +undertakings 4 +preventing 4 +contributed 4 +leads 4 +gets 4 +Would 4 +infringement 4 +deals 4 +wine 4 +booking 4 +fee 4 +truly 4 +(1) 4 +Belarus 4 +GANTER 4 +acceptable 3 +legislative 3 +Fitness 3 +2004 3 +soldiers 3 +beat 3 +clothes 3 +Yes 3 +nation 3 +scale 3 +except 3 +respective 3 +Description: 3 +THE 3 +Forum 3 +despite 3 +destroyed 3 +demanding 3 +cycles 3 +minute 3 +difference 3 +justified 3 +edited 3 +Anonymous 3 +(s) 3 +Wikitravel 3 +honest 3 +arbitrary 3 +tour 3 +furniture 3 +15: 3 +Red 3 +marquis 3 +participated 3 +offences 3 +Iranian 3 +consumer 3 +started 3 +discussing 3 +Who 3 +intervention 3 +accident 3 +inhabited 3 +weak 3 +consulting 3 +'m 3 +interested 3 +boot 3 +reliable 3 +crop 3 +substantially 3 +deficit 3 +indicators 3 +marine 3 +drawn 3 +winning 3 +couple 3 +statements 3 +combat 3 +statistical 3 +exports 3 +25% 3 +recorded 3 +creature 3 +entirely 3 +Mann 3 +holidays 3 +news 3 +obvious 3 +cable 3 +) 3 +bath 3 +journey 3 +Google 3 +emphasise 3 +laser 3 +El 3 +cuisine 3 +conflicts 3 +constitute 3 +Tunisia 3 +Skype 3 +platform 3 +damaging 3 +communities 3 +notification 3 +beings 3 +consent 3 +drawing 3 +chips 3 +courses 3 +affecting 3 +urge 3 +controlling 3 +enthusiasm 3 +delete 3 +inside 3 +competitive 3 +structures 3 +equipment 3 +inform 3 +confronted 3 +meantime 3 +Securities 3 +Association 3 +i 3 +Portugal 3 +eventually 3 +Venetian 3 +il 3 +customs 3 +voluntary 3 +directives 3 +passed 3 +category 3 +Great 3 +expressed 3 +Tokyo 3 +conventional 3 +Hall 3 +sometimes 3 +licences 3 +noise 3 +Party 3 +fleet 3 +depends 3 +None 3 +reputation 3 +English 3 +explore 3 +Chinese 3 +enjoys 3 +plates 3 +devoted 3 +decide 3 +ourselves 3 +activists 3 +submit 3 +collect 3 +Website 3 +(NL) 3 +ambitious 3 +funding 3 +handed 3 +whereas 3 +hospital 3 +unforgettable 3 +input 3 +emphasised 3 +genuine 3 +constitution 3 +noted 3 +Choose 3 +cards 3 +summit 3 +al 3 +negotiating 3 +door 3 +gap 3 +foil 3 +affairs 3 +western 3 +superb 3 +fan 3 +Note: 3 +string 3 +episode 3 +# 3 +topic 3 +preferably 3 +Unix 3 +extension 3 +Romania 3 +victim 3 +Constitutional 3 +Nonetheless 3 +emerged 3 +cope 3 +abuses 3 +justice 3 +Free 3 +expect 3 +addressed 3 +restore 3 +credibility 3 +fuel 3 +peak 3 +impression 3 +gone 3 +Society 3 +resort 3 +dealt 3 +reiterate 3 +adapted 3 +amounts 3 +FIFA 3 +18th 3 +dimension 3 
+luck 3 +800 3 +Christ 3 +transactions 3 +divided 3 +Hungarian 3 +ban 3 +linguistic 3 +minorities 3 +perspective 3 +truth 3 +Recent 3 +circuit 3 +shooting 3 +Furthermore 3 +baroque 3 +tower 3 +organizing 3 +exceptional 3 +8,000 3 +wind 3 +enabling 3 +cross-border 3 +Order 3 +fishermen 3 +owners 3 +liable 3 +suffered 3 +approval 3 +documents 3 +opposed 3 +snow 3 +hiking 3 +condition 3 +looked 3 +treaty 3 +effectively 3 +importantly 3 +acknowledge 3 +contrary 3 +ears 3 +Oliver 3 +2011 3 +characteristics 3 +(or 3 +translation 3 +determined 3 +seminars 3 +easily 3 +actual 3 +Yugoslavia 3 +drives 3 +picturesque 3 +golf 3 +Power 3 +rivers 3 +hazard 3 +Electronic 3 +Foundation 3 +Have 3 +criticism 3 +moments 3 +partnerships 3 +collaboration 3 +Radio 3 +clause 3 +bored 3 +usually 3 +ratifying 3 +Serbia 3 +OS 3 +cone 3 +deposit 3 +lake 3 +reconstruction 3 +romantic 3 +humanitarian 3 +spectrum 3 +Products 3 +incomes 3 +harmonise 3 +attached 3 +mandate 3 +Times 3 +Microsoft 3 +presentations 3 +automatically 3 +ensuring 3 +hut 3 +convenience 3 +Lido 3 +500 3 +proper 3 +strike 3 +ball 3 +roll 3 +congratulations 3 +sort 3 +Libya 3 +immigration 3 +student 3 +ancient 3 +La 3 +Situated 3 +scientific 3 +supplied 3 +drugs 3 +sovereign 3 +drinking 3 +ice 3 +drink 3 +Several 3 +hundred 3 +primary 3 +navigation 3 +increases 3 +placed 3 +Vietnam 3 +Airlines 3 +Can 3 +fulfil 3 +nor 3 +remained 3 +centres 3 +shore 3 +outside 3 +managing 3 +Eastern 3 +min 3 +organic 3 +compared 3 +expression 3 +throw 3 +buying 3 +Process 3 +Baltic 3 +detail 3 +Commissioners 3 +rain 3 +enterprise 3 +agency 3 +Chile 3 +creativity 3 +Trade 3 +insist 3 +Made 3 +UK 3 +Labour 3 +learning 3 +Cyprus 3 +sincere 3 +star 3 +influence 3 +friend 3 +endorse 3 +brutal 3 +option 3 +wines 3 +excluded 3 +middle 3 +reserve 3 +fat 3 +score 3 +factors 3 +Britain 3 +debt 3 +medium-sized 3 +2020 3 +implemented 3 +marked 3 +traffic 3 +fines 3 +drive 3 +white 3 +renewable 3 +patents 3 +investments 3 +sensible 3 +warning 3 +lessons 3 +aside 3 +exchanges 3 +regulatory 3 +spread 3 +anyone 3 +30% 3 +inspection 3 +stays 3 +considering 3 +eyes 3 +race 3 +potentially 3 +remote 3 +referred 3 +contain 3 +wider 3 +audience 3 +winners 3 +breakthrough 3 +priorities 3 +brands 3 +settings 3 +Many 3 +Services 3 +ECB 3 +inflation 3 +pdf 3 +Easter 3 +welcomed 3 +mayor 3 +club 3 +positions 3 +easyswap 3 +ought 3 +specially 3 +appointed 3 +F 3 +defaults 3 +currency 3 +Both 3 +stands 3 +tied 3 +bodies 3 +background 3 +Ukraine 3 +Firstly 3 +measure 3 +diverse 3 +Contact 3 +Ski 3 +investigation 3 +sub-categories 3 +contextually 3 +grammatically 3 +meaningfully 3 +phrase 3 +highlighting 3 +arise 3 +launch 3 +regime 3 +temperature 3 +Vostermans 3 +understood 3 +books 3 +satisfactory 3 +promptly 3 +visual 3 +Company 3 +Summit 3 +strengthening 3 +assembly 3 +specialized 3 +proportion 3 +won 3 +Consumer 3 +videos 3 +suggestions 3 +ensures 3 += 3 +realised 3 +peaceful 3 +protest 3 +Other 3 +entry 3 +Rome 3 +revenues 3 +thereby 3 +discuss 3 +enhanced 3 +patent 3 +involving 3 +store 3 +exclusive 3 +Centre 3 +Country 3 +fashion 3 +lift 3 +percentage 3 +instructions 3 +prove 3 +interior 3 +black 3 +object 3 +Deutsche 3 +Börse 3 +respectively 3 +Setting 3 +raised 3 +eastern 3 +strengthen 3 +avenues 3 +average 3 +fraud 3 +associated 3 +flexible 3 +arrangements 3 +fantastic 3 +abuse 3 +Australia 3 +served 3 +so-called 3 +sufficiently 3 +aspect 3 +un 3 +isn 3 +surveillance 3 +hour 3 +en 3 +dried 3 +basically 3 +imports 3 +communications 3 +ease 3 +accessories 3 +alcohol 3 +doors 3 +hopes 3 
+part-session 3 +EPP 3 +merely 3 +youth 3 +placement 3 +quarters 3 +skies 3 +property 3 +heads 3 +Israel 3 +fail 3 +maintenance 3 +cancer 3 +multimedia 3 +begins 3 +distribution 3 +bridge 3 +taught 3 +quota 3 +badly 3 +e.g. 3 +dawn 3 +executive 3 +wholly 3 +northern 3 +patient 3 +grounds 3 +instantly 3 +hostels 3 +delivery 3 +reported 3 +trip 3 +Once 3 +device 3 +vast 3 +ratification 3 +collection 3 +modify 3 +missing 3 +touch 3 +serving 3 +worthy 3 +accounts 3 +syndrome 3 +forgotten 3 +2012 3 +Growth 3 +relationship 3 +} 3 +turned 3 +assure 3 +Storage 3 +shop 3 +size 3 +assessment 3 +Netherlands 3 +suggest 3 +Portuguese 3 +Also 3 +Established 3 +attractions 3 +unacceptable 3 +reinforced 3 +officer 3 +avoided 3 +interactive 3 +QuarkXPress 3 +tried 3 +distributed 3 +advocated 3 +routes 3 +Period 3 +direction 3 +parliaments 3 +combine 3 +fill 3 +shares 3 +hard 3 +letters 3 +branch 3 +(see 3 +registered 3 +got 3 +kilometers 3 +gender 3 +trips 3 +renowned 3 +resorts 3 +(at 3 +ultimately 3 +137 3 +professions 3 +afternoon 3 +Home 3 +encouraging 3 +save 3 +finest 3 +prefer 3 +August 3 +completion 3 +limiting 3 +mother 3 +quotas 3 +situations 3 +optimum 3 +adult 3 +oldest 3 +leave 3 +joining 3 +Systems 3 +unit 3 +recommend 3 +Giorgio 3 +argument 3 +discover 3 +foot 3 +mostly 3 +Communism 3 +respects 3 +colours 3 +Institute 3 +notes 3 +shortening 3 +representative 3 +territories 3 +tribute 3 +sand 3 +factor 3 +stops 3 +branches 3 +drinks 3 +visitors 3 +Summer 3 +update 3 +Samsung 3 +desired 3 +Public 3 +transportation 3 +attacks 3 +Asia 3 +Managing 3 +pilot 3 +Such 3 +Mac 3 +Learn 3 +textile 3 +jointly 3 +planning 3 +reality 3 +harga 3 +Euro 3 +connect 3 +trend 3 +testing 3 +harbour 3 +Lukashenko 3 +preparations 3 +Call 3 +unemployment 3 +differences 3 +liberties 3 +telephone 3 +Based 3 +responses 3 +university 3 +Balzan 3 +lots 3 +wisdom 3 +Directive 3 +felt 3 +reject 3 +euros 3 +word 3 +1997 3 +compensation 3 +• 3 +Luggage 3 +hearty 3 +skills 3 +lose 3 +lock 3 +discussions 3 +bit 3 +talks 3 +tone 3 +concept 3 +loans 3 +authorised 3 +wave 3 +vital 3 +complicated 3 +coffee 3 +Israeli 3 +presidency 3 +flights 3 +series 3 +faith 3 +welfare 3 +Fully 3 +enhance 3 +y 3 +mixed 3 +Control 3 +legally 3 +Friendly 3 +Economic 3 +greatest 3 +Amsterdam 3 +spring 3 +removed 3 +armed 3 +metals 3 +tourist 3 +dancing 3 +necessarily 3 +extraordinary 3 +images 3 +generate 3 +inequality 3 +Center 3 +(the 3 +opening 3 +Children 3 +diabetes 3 +adults 3 +layout 3 +square 3 +regards 3 +Shambhala 3 +adjusted 3 +era 3 +Village 3 +classroom 3 +unanimous 3 +economics 3 +technique 3 +@ 3 +subsidiarity 3 +School 3 +Hence 3 +regularly 3 +incredible 3 +functional 3 +Double 3 +modifier 3 +rail 3 +adjust 3 +Events 3 +practices 3 +arrived 3 +Workflow 3 +respected 3 +becomes 3 +Greek 3 +com 3 +medium 3 +Jean 3 +Piazza 3 +'ve 3 +technicians 3 +Korea 3 +San 3 +yourself 3 +gain 3 +Suites 3 +fears 3 +ye 3 +looks 3 +TEC7 3 +tea 3 +allies 3 +indicate 3 +powers 3 +educational 3 +colors 3 +commitment 3 +reference 3 +engineering 3 +IG 3 +por 3 +gallery 3 +victory 3 +diesel 3 +Ministry 3 +consumption 3 +enemy 3 +PlayStation 3 +extend 3 +RPG 3 +të 3 +china 3 +FI 3 +null 2 +laid 2 +Trainer 2 +Instructor 2 +Italian 2 +Postural 2 +Gym 2 +Stretching 2 +collaborating 2 +sleep 2 +budgets 2 +purchased 2 +UN 2 +(FR) 2 +thanking 2 +entrusted 2 +shadow 2 +tattoo 2 +preliminary 2 +FIRST 2 +assurances 2 +Foreign 2 +distinguished 2 +Discover 2 +intuitive 2 +coast 2 +effectiveness 2 +decision-making 2 +preparing 2 +renew 2 +tempted 2 +vineyards 2 
+exhibiting 2 +perceived 2 +moon 2 +Bone 2 +Bruise 2 +Technique 2 +1995 2 +anywhere 2 +correction 2 +bone 2 +genuinely 2 +stance 2 +criminal 2 +immigrants 2 +broader 2 +Fischler 2 +rural 2 +tackle 2 +definitely 2 +John 2 +openly 2 +Later 2 +engines 2 +north 2 +cross 2 +(IT) 2 +salt 2 +sexy 2 +silly 2 +wonderful 2 +fundamentally 2 +lesson 2 +mix 2 +businessman 2 +enlarged 2 +NGOs 2 +Look 2 +Dublin 2 +pub 2 +Among 2 +formula 2 +appropriately 2 +powerful 2 +sharing 2 +export 2 +missions 2 +informative 2 +delays 2 +Why 2 +slowly 2 +Italy 2 +40% 2 +1958 2 +warmly 2 +expressive 2 +(If 2 +stress 2 +feeling 2 +distinct 2 +surroundings 2 +Doyle 2 +expects 2 +bunch 2 +fallen 2 +fire 2 +destination 2 +plugin 2 +PDF 2 +reader 2 +precision 2 +measurements 2 +factory 2 +easier 2 +ground 2 +à 2 +carte 2 +lunch 2 +dinner 2 +Display 2 +undermining 2 +obstacle 2 +Egypt 2 +API 2 +dramatic 2 +Copenhagen 2 +cells 2 +Taliban 2 +ecological 2 +belong 2 +earned 2 +asylum 2 +farm 2 +Place 2 +combinations 2 +brain 2 +Save 2 +Travel 2 +abroad 2 +SIM 2 +Golf 2 +bug 2 +ahead 2 +gym 2 +relax 2 +Wi-Fi 2 +zone 2 +negotiated 2 +alloys 2 +magnetic 2 +threshold 2 +crossed 2 +Product 2 +partial 2 +warn 2 +observers 2 +expressly 2 +elections 2 +reportedly 2 +commanders 2 +operations 2 +realistic 2 +subtle 2 +declare 2 +proceed 2 +proposing 2 +Year 2 +unemployed 2 +medical 2 +bungalow 2 +strength 2 +analyze 2 +conciliation 2 +awarded 2 +emerges 2 +noting 2 +insecure 2 +Question 2 +Teachers 2 +boys 2 +Conduct 2 +Greece 2 +itinerary 2 +simultaneously 2 +investigate 2 +privately 2 +REACH 2 +colleague 2 +ranked 2 +fourth 2 +amend 2 +Application 2 +Protocol 2 +fiber 2 +fabric 2 +fabrics 2 +Tuesday 2 +sensors 2 +crimes 2 +arrested 2 +taste 2 +According 2 +abundant 2 +finds 2 +resides 2 +hearts 2 +plate 2 +18: 2 +gautam 2 +Training 2 +Coalition 2 +transnational 2 +audiovisual 2 +Instead 2 +hold 2 +informal 2 +restrict 2 +suspended 2 +sentences 2 +Webmaster 2 +register 2 +der 2 +membership 2 +personally 2 +identifying 2 +Audio 2 +arranged 2 +composition 2 +Roman 2 +comparative 2 +Subject: 2 +Dutch 2 +micro-credit 2 +Karel 2 +Gott 2 +concert 2 +Prague 2 +02 2 +http: 2 +banners 2 +remaining 2 +ill 2 +pride 2 +ambience 2 +occur 2 +Arab 2 +Given 2 +developments 2 +observed 2 +Southern 2 +African 2 +Customs 2 +revenue 2 +Kinnock 2 +EMC 2 +losses 2 +Cathedral 2 +temple 2 +participating 2 +stations 2 +safeguarding 2 +controls 2 +postal 2 +terminal 2 +boy 2 +Latin 2 +converted 2 +revealed 2 +Quality 2 +wise 2 +cold 2 +nearly 2 +23 2 +Liberals 2 +foods 2 +Arabic 2 +Art 2 +paint 2 +socket 2 +") 2 +nail 2 +Live 2 +execution 2 +intensive 2 +beef 2 +h 2 +booth 2 +H. 
[... diff truncated: this hunk adds a frequency-sorted vocabulary data file, several thousand added lines of the form `+<token> <count>` (one token and its frequency count per line); the entries in this span carry counts of 2 followed by counts of 1 ...]
+SNGs 1 +Combined 1 +archiving 1 +scanning 1 +sellers 1 +ME 1 +monthly 1 +constructs 1 +match 1 +newline 1 +Porth 1 +Avallen 1 +feeding 1 +conditions; 1 +Personally 1 +umantis 1 +contiuously 1 +profit 1 +wildlife 1 +firmer 1 +legal-ethical 1 +footing 1 +Logically 1 +complying 1 +Kyoto 1 +eBenedict.org 1 +Daily 1 +WYD08 1 +Espanol 1 +WYD 1 +Italiano 1 +ybenedict.org 1 +Papal 1 +Plate 1 +winner 1 +viewing 1 +Military 1 +Aircraft 1 +A400M 1 +Refuelling 1 +Tanker 1 +': 1 +definitions 1 +Mine 1 +taboret 1 +judgement 1 +figures 1 +Lucerne 1 +safer 1 +Jimmy 1 +Robert: 1 +rouge 1 +noir 1 +16-mm 1 +Saskia 1 +Keyser 1 +indicates 1 +routine 1 +confirmation 1 +CLICK 1 +Immediately 1 +Download 1 +Forex 1 +Robot 1 +forex 1 +rendez-vous 1 +well-drained 1 +western-facing 1 +sloping 1 +soils 1 +suited 1 +breeding 1 +pasturing 1 +small-scale 1 +Bank: 1 +encroachment 1 +Palestinian-controlled 1 +firing 1 +traceability 1 +intelligence 1 +workforces 1 +directors 1 +cartel 1 +philosophy 1 +presenting 1 +broad-based 1 +engagement 1 +swallowing 1 +fits 1 +Inconvenient 1 +Truth 1 +documentary 1 +featuring 1 +Gore 1 +touching 1 +therapeutic 1 +fragmentation 1 +Arnab 1 +Goswami 1 +chief 1 +editor 1 +English-news 1 +discusses 1 +Peace 1 +Symposium 1 +Birmingham 1 +England 1 +abortions 1 +mortality 1 +mothers 1 +Mariahilfer 1 +Straße 1 +longest 1 +trunk 1 +rugged 1 +lakes 1 +swift 1 +intriguing 1 +blow 1 +ideology 1 +Start- 1 +Settings- 1 +Start 1 +solely 1 +6th 1 +locker 1 +garage 1 +study-in.de 1 +considers 1 +throughput 1 +modeled 1 +analyzed 1 +graph 1 +-based 1 +formalisms 1 +Petri 1 +net 1 +castles 1 +heat 1 +Gazette 1 +www.ebundesanzeiger.de 1 +proliferation 1 +multiplication 1 +committees; 1 +Extremely 1 +speculation 1 +futures 1 +contracts 1 +Alkacon 1 +malice 1 +intentionally 1 +negligence 1 +hazardous 1 +inflexibility 1 +inelasticity) 1 +amplifies 1 +Telephone 1 +boxes 1 +profitable 1 +benefited 1 +Ben 1 +Ali 1 +Trabelsi 1 +Colombiers 1 +Slovenia 1 +exhibits 1 +gathering 1 +part-time 1 +geared 1 +patterns 1 +fruitful 1 +Ombudsman 1 +firmware 1 +Attention 1 +achievement 1 +holidaymakers 1 +indirectly 1 +Transfers 1 +venue 1 +ceramic 1 +tiles 1 +showroom 1 +Vinjani 1 +amnesty 1 +expose 1 +exploiters 1 +PSI 1 +Production 1 +introduces 1 +holostic 1 +telematics 1 +Old 1 +Trader 1 +submits: 1 +Lately 1 +Ive 1 +readers 1 +SA 1 +cranky 1 +Perma-Bear 1 +FP7 1 +ideally 1 +posizionato 1 +Reims 1 +hotel: 1 +1880 1 +Versilia 1 +uninhabited 1 +swampy 1 +myself: 1 +GOD 1 +LEDs 1 +photodiodies 1 +phototransistors 1 +diode 1 +optical 1 +accepts 1 +Waddington 1 +oral 1 +(2-11years) 1 +fare 1 +plus 1 +surcharges 1 +Lenovo 1 +Series 1 +desktops 1 +engineered 1 +worry-free 1 +computing 1 +82nd 1 +4th 1 +Sejm 1 +2004. 1 +www.sejm.gov.pl 1 +(Parliament 1 +resolution) 1 +A19 1 +Vicarage 1 +en-suite 1 +mile 1 +M62 1 +Good 1 +post-American 1 +twentieth 1 +30,000 1 +tons 1 +Others 1 +Portage 1 +Reducing 1 +vehicles 1 +financial-sector 1 +recovers 1 +excuses 1 +Clinton 1 +Warcraft 1 +copyrighted 1 +Blizzard 1 +Entertainment 1 +Inc 1 +organized 1 +Delaware 1 +non-economically 1 +unhappy 1 +one: 1 +non-implementation 1 +patchwork 1 +burdensome 1 +connections 1 +Bruges 1 +Antwerp 1 +mess 1 +suffer 1 +pensioners 1 +impatiently 1 +holder 1 +Tre 1 +Ci 1 +Luce 1 +S.p.A. 
1 +C.so 1 +(Città 1 +Satellite) 1 +20020 1 +Cesate 1 +(MI) 1 +Tel 1 +commerce 1 +flourish 1 +insecurity 1 +Perpignan 1 +literary 1 +Bosomy 1 +anal 1 +ride 1 +potent 1 +cock 1 +Aquila 1 +Priscilla 1 +handful 1 +collapse 1 +palette 1 +Ugandan 1 +radical 1 +planted 1 +tremendous 1 +landmines 1 +admin 1 +VTC 1 +refined 1 +presents 1 +folk 1 +costumes 1 +troop 1 +rowdy 1 +musicians 1 +Elbe 1 +lowlands 1 +Conjuring 1 +strange 1 +thunderous 1 +rhythmic 1 +productions 1 +recounted 1 +mythology 1 +volcano 1 +rare 1 +freedoms 1 +waived 1 +topped 1 +vitamins 1 +Oh 1 +heckling 1 +endeavouring 1 +Accordingly 1 +Mobility 1 +Botti 1 +EADS 1 +President-in-Office 1 +Allister 1 +condemned 1 +harmful 1 +henceforth 1 +EC 1 +ninjutsu 1 +fled 1 +Kadampa 1 +Meditation 1 +headquarters 1 +Tharpa 1 +Verlag 1 +non-profit 1 +Geshe 1 +Kelsang 1 +Gyatso 1 +unthinkable 1 +Rompuy 1 +Gawain 1 +turning 1 +co-ordinates 1 +Kynon 1 +registering 1 +truthfully 1 +Terence 1 +Wynn 1 +Elmar 1 +Brok 1 +tabling 1 +unquestionably 1 +poster 1 +printed 1 +(email 1 +berlin 1 +ffii.org). 1 +shapes 1 +sizes 1 +Erdogan 1 +proud; 1 +Djibouti 1 +(PT) 1 +Florence 1 +Maria 1 +Fiore 1 +Repubblica 1 +Signoria 1 +Duomo 1 +Giotto 1 +belltower 1 +Hänsch 1 +Don 1 +waterfalls 1 +Varone 1 +Gorg 1 +'Abiss 1 +Tremosine 1 +gorge 1 +Herbert 1 +Bader 1 +Director 1 +TECHNOLOGIES 1 +anti-democratic 1 +texts 1 +language) 1 +Wiki 1 +MyWiki 1 +Bayview 1 +Sliema 1 +promenade 1 +Marsamxetto 1 +Valletta 1 +Ancora 1 +oggi 1 +trattoria 1 +Ischia 1 +gestita 1 +cura 1 +sapienza 1 +dal 1 +fratello 1 +mentre 1 +cuoco 1 +figlio 1 +Alex 1 +shame 1 +Rahel 1 +tentatively 1 +quitting 1 +Galeri 1 +Petronas 1 +Be 1 +festivals 1 +performances 1 +Apartement 1 +salon 1 +Embermage 1 +Goblin 1 +Carefree 1 +Package 1 +SMS 1 +stating 1 +supporting 1 +vaild 1 +17.5% 1 +Active 1 +Na 1 +Zámečku 1 +enjoyments 1 +loves 1 +unwind 1 +outsourcing 1 +reliably 1 +cheaply 1 +Overlooking 1 +Baie 1 +Cavalaire 1 +Résidence 1 +Marronniers 1 +artisanal 1 +fleets 1 +multi-species 1 +Sarud 1 +lounge 1 +Brazil 1 +preview 1 +Debian-Installer 1 +developers 1 +Which 1 +Zīle 1 +fixed 1 +performance-related 1 +dreamed 1 +Fathers 1 +replication 1 +proteins 1 +objects 1 +multi-cellular 1 +Same 1 +tears 1 +drunkenness 1 +servo 1 +Donnelly 1 +Thyssen 1 +Secchi 1 +Indian 1 +subcontinent 1 +Georgieva 1 +coordinate 1 +Soulier 1 +AIBO 1 +chase 1 +battery 1 +collapsed 1 +premises 1 +Bize 1 +CALANDRETA 1 +LO 1 +CIGAL 1 +tranquillity 1 +ASEAN 1 +modicum 1 +transient 1 +mailing 1 +1977 1 +1974 1 +1976 1 +1986 1 +1988 1 +awaits 1 +INTERNATIONAL 1 +CORONOR 1 +Composites 1 +excellect 1 +Catania 1 +remarkable 1 +Civil 1 +Liberties 1 +succeeded 1 +uniting 1 +rating 1 +95% 1 +channel 1 +Check 1 +liking 1 +Regulators 1 +(CESR), 1 +speaker 1 +interpreter 1 +tricked 1 +attackers 1 +denial 1 +(application 1 +crash) 1 +LIVING 1 +TOWER 1 +Phantasy 1 +Landscape 1 +Visiona 1 +Südtirol 1 +Superbike 1 +ramifications 1 +ownership 1 +concentration 1 +dominance 1 +spheres 1 +i.e. 1 +vigilant 1 +encouragement 1 +Città 1 +Parenzo 1 +Trieste 1 +congress 1 +Giusto 1 +audits 1 +mini-suite 1 +wrought 1 +verandah 1 +admiring 1 +synergy 1 +post-2013 1 +GPS 1 +PhotoMapper 1 +NG2 1 +anddetailed 1 +clicks 1 +elastic 1 +cord 1 +was: 1 +conjunction 1 +EMC-show 1 +spirit 1 +Acting 1 +P-5 1 +") 1 +unmatched 1 +interact 1 +Mor 1 +Heileen 1 +spite 1 +robbery 1 +burials 1 +collective 1 +internments 1 +(up 1 +grave). 
1 +Collingham 1 +Posselt 1 +helping 1 +Fair 1 +deep-seated 1 +Notebook 1 +Desktop 1 +Laptop 1 +Team 1 +calendars 1 +Password 1 +E-mail 1 +Backup 1 +Move 1 +(optional), 1 +folders 1 +Folders 1 +NT 1 +Yasser 1 +Mohammed 1 +Al-Halabi 1 +interview 1 +Orgeval 1 +Poissy 1 +A13 1 +A14 1 +Campanile 1 +Its 1 +mild 1 +Böge 1 +geographical 1 +boost 1 +supply-chain 1 +resumption 1 +Gracanica 1 +Spreca 1 +Doboj 1 +50km 1 +SPA 1 +Beauty 1 +Miramar 1 +hydrotherapy 1 +ascertain 1 +wide-range 1 +Janusz 1 +Onyszkiewicz 1 +PRADA 1 +NARGESA 1 +metallurgic 1 +amassed 1 +machinery 1 +(folding 1 +guillotines 1 +die-cutting 1 +twisting 1 +profile 1 +formers 1 +revolves 1 +Integrated 1 +uncertain 1 +IMAGINE 1 +EYES: 1 +ophthalmic 1 +optometric 1 +Innovation 1 +Award 1 +BioRIF 1 +honoured 1 +Silver 1 +Medal 1 +OPTO 1 +MIRAO 1 +mirror 1 +Wireless 1 +7.50 1 +commanded 1 +bless 1 +ABAS 1 +Immogo 1 +Ciro 1 +commuted 1 +exile 1 +Articles 1 +Amphiphilic 1 +preferring 1 +implied 1 +prefix 1 +amphi 1 +denoting 1 +radioactive 1 +absolut 1 +well-beeing-feeling 1 +Tauern 1 +Kaprun 1 +recovered 1 +ranking 1 +Filmed 1 +cameras 1 +Xenia 1 +footsoles 1 +filmed 1 +Lagos 1 +unlike 1 +HTTP 1 +Authentication 1 +IIS 1 +cgi.rfc2616 1 +headers 1 +value). 1 +one-boot 1 +though; 1 +editing 1 +config 1 +saving 1 +bootloader 1 +ZAJAZD 1 +U 1 +ELIZY 1 +Ojców 1 +throughway 1 +Krakow-Olkusz 1 +Czajowice 1 +immediate 1 +Krakow 1 +Krakow-Balice 1 +percussive 1 +noiselike 1 +transmitted 1 +Join 1 +SDL 1 +Internship 1 +Program 1 +biennial 1 +grown 1 +animal 1 +husbandry 1 +CITY 1 +MAPS 1 +Clear 1 +at-a-glance 1 +fold 1 +inverts 1 +greediness 1 +quantifiers 1 +hasty 1 +political-military 1 +brink 1 +defeat 1 +steady 1 +proceeded 1 +specialise 1 +MOULDS 1 +disposable 1 +cutlery 1 +multi-cavity 1 +steels 1 +mould 1 +injections 1 +Osijek 1 +hunting 1 +angling 1 +Drava 1 +backwaters 1 +Newspapers 1 +spaces 1 +non-smoking 1 +comparison 1 +feminine 1 +conceived 1 +non-subject 1 +irrational 1 +uncontrolled 1 +identified 1 +corporeal 1 +exclusion 1 +engenders 1 +hatred 1 +Blu-ray 1 +Electronics 1 +sadly 1 +shakedown 1 +frameworks 1 +open-air 1 +Relax 1 +exploring 1 +chat 1 +waited 1 +Gender 1 +budgeting 1 +Markenwerk 1 +nationwide 1 +portal 1 +Sparkasse 1 +effecting 1 +upturn 1 +low-carbon 1 +roughly 1 +2.7 1 +Excluding 1 +consolidated 1 +1,314 1 +encumbered 1 +disproportionately 1 +burden 1 +dad 1 +Ask 1 +Ullmann 1 +specialist 1 +wearable 1 +merchandising 1 +textiles 1 +log-on 1 +Firenze 1 +Introducing 1 +undermine 1 +Facility 1 +(EFSF), 1 +Brescia 1 +realized 1 +collectives 1 +nurseries 1 +CLEANER 1 +hardens 1 +slates 1 +Paragraph 1 +propaganda 1 +commissions 1 +upsell 1 +hours: 1 +20,00 1 +(deliveries 1 +23: 1 +h). 1 +enquiry 1 +Allah 1 +careth 1 +knoweth 1 +268 1 +granteth 1 +pleaseth; 1 +receiveth 1 +overflowing; 1 +grasp 1 +Message 1 +understandable 1 +liberalizing 1 +undertake 1 +INCB 1 +Rühle 1 +sprint 1 +(7.5 1 +women), 1 +biathlete 1 +shoots 1 +shots), 1 +prone 1 +standing 1 +Convention: 1 +listened 1 +debated 1 +Precisely 1 +influenza 1 +pandemic 1 +reviewed 1 +covers 1 +inkjet 1 +plotter 1 +unambiguous 1 +Geaprodukt 1 +Ltd. 
1 +1990 1 +suite 1 +nodded 1 +flattest 1 +tray 1 +Villeroy 1 +Boch 1 +SQUARO 1 +Superflat 1 +blends 1 +awards 1 +specialised 1 +Mercator 1 +Press 1 +alternately 1 +Playing 1 +Rated 1 +Visit 1 +Essen 1 +repeating 1 +Cologne 1 +Hendrix 1 +guitarists 1 +gimmickry 1 +exploit 1 +wah-wah 1 +pedal 1 +Staying 1 +ARCOTEL 1 +Velvet 1 +zest 1 +sensuously 1 +shortcomings 1 +minority 1 +overwhelming 1 +Rig 1 +Kontrol 1 +MIDI 1 +plug-ins 1 +DAW 1 +overlooked 1 +wiped 1 +unjustified 1 +commodities 1 +influenced 1 +biofuel 1 +varied 1 +spate 1 +earning 1 +worries 1 +inland 1 +Gallura 1 +Arzachena 1 +posts 1 +illustration 1 +high-handed 1 +demonstration 1 +radar 1 +Smaller 1 +aren 1 +receiver 1 +Fabulous 1 +world-famous 1 +impostors 1 +expired 1 +Chalet 1 +Alcázares 1 +(Murcia) 1 +Detached 1 +Floor 1 +Ref 1 +entory 1 +chain 1 +Mathias 1 +Hlubek 1 +PIIGS 1 +recessions 1 +deepen 1 +costly 1 +LearnForLifeDeutschland 1 +gmail.com 1 +(0) 1 +21758077 1 +2600 1 +Lisec 1 +top-managers 1 +(55.000 1 +48 1 +nations) 1 +sorting 1 +lounges 1 +Memorandum 1 +Understanding 1 +CBD 1 +Secretariat 1 +Environment 1 +Programme 1 +Wave 1 +awareness 1 +Aquaculture 1 +aquatic 1 +ameliorate 1 +otherwise 1 +scrapped 1 +tsunami 1 +underwritten 1 +ELVIA 1 +Insurance 1 +N.V. 1 +(Netherlands), 1 +Nederlandsche 1 +(DNB) 1 +Director-General 1 +happily 1 +participate 1 +experiencing 1 +longstanding 1 +Hezbollah 1 +wrongly 1 +calculated 1 +Adriaticohotels 1 +represents 1 +Silvagni 1 +Cattolica 1 +Gabicce 1 +Mare 1 +hostage 1 +panic 1 +Fukushima 1 +collections 1 +GeneaNet 1 +Privilege 1 +unlimited 1 +Malá 1 +Jasénka 1 +folklore 1 +Dispersion 1 +paste-forming 1 +PVC 1 +polymer 1 +softener 1 +additives 1 +volatile 1 +pays 1 +Cone 1 +état 1 +extradite 1 +Together 1 +okurigana 1 +noun 1 +inflected 1 +adjective 1 +verb: 1 +breach 1 +normal 1 +push 1 +reaching 1 +impose 1 +officeholders 1 +Twenty-Second 1 +guessing 1 +47 1 +resellers 1 +lighten 1 +gray 1 +tones 1 +Eternal 1 +Steps 1 +Personal 1 +Identifiable 1 +factual 1 +identifiable 1 +Titley 1 +Though 1 +automobile 1 +locate 1 +Strip 1 +indifferently 1 +twenty-first 1 +railways 1 +­ 1 +ization 1 +hiring 1 +respecting 1 +adhering 1 +norms 1 +broadcasters 1 +modalities 1 +sacrifice 1 +telecommunications 1 +editions 1 +networking 1 +(use 1 +optionally 1 +systems) 1 +switchboard 1 +locomotive 1 +it). 1 +143 1 +inadmissibility 1 +reintroduced 1 +111 1 +(2). 
1 +Xfire 1 +aspiring 1 +touches 1 +machinima 1 +masterpieces 1 +mistake 1 +Simply 1 +recognising 1 +OGG 1 +Ogg 1 +Vorbis 1 +compression 1 +absolute 1 +rotary 1 +encoders 1 +unrivalled 1 +singleturn 1 +16-bit 1 +0.1 1 +stresses 1 +Muslim 1 +marginalized 1 +Take 1 +spotlight 1 +bedding 1 +reina 1 +mi 1 +casa 1 +ha 1 +sido 1 +generosa 1 +acertada 1 +como 1 +casi 1 +siempre 1 +particular: 1 +incorporates 1 +doses 1 +pesticide 1 +Zyklon 1 +Degesch 1 +(Deutsche 1 +Gesellschaft 1 +für 1 +Schädlingsbekämpfung), 1 +42.2 1 +template 1 +Cancun-All 1 +Inclusive 1 +EAASM 1 +undermines 1 +pharma 1 +Pirate 1 +Party), 1 +Haiti 1 +worked 1 +reeling 1 +hurricanes 1 +earthquake 1 +struck 1 +refraining 1 +prohibited 1 +burnished 1 +manuscript 1 +extraordinarily 1 +old; 1 +reproduced 1 +metal 1 +individually 1 +accounted 1 +6.2 1 +lending 1 +solar 1 +amp; 1 +Copper 1 +Action 1 +wars 1 +evenings 1 +pullover 1 +advisable 1 +colony 1 +1819 1 +fortunes 1 +Singapore 1 +island-state 1 +Malaysia 1 +Indonesia 1 +intertwined 1 +paves 1 +workable 1 +Wrong 1 +logon 1 +SSL 1 +Directory 1 +5.x 1 +reachable 1 +frenetic 1 +monolithic 1 +entities 1 +selecting 1 +suppliers 1 +attempt 1 +guaranteeing 1 +Blanca 1 +director 1 +IAEA 1 +Baradei 1 +Iranians 1 +eight 1 +distributors 1 +InsERT 1 +kids 1 +cage 1 +outdoor-enclosure 1 +chicken 1 +Ages 1 +life-like 1 +portraits 1 +tend 1 +gnaw 1 +venom 1 +(Bogert 1 +1956). 1 +grinded 1 +dents 1 +cleared 1 +painted 1 +Hammerite 1 +Has 1 +outbreak 1 +admissibility 1 +From: 1 +dugy 1 +Related 1 +Videos 1 +encountered 1 +Ser 1 +digno 1 +significa 1 +merecedor 1 +algo 1 +lo 1 +visto 1 +simplemente 1 +hecho 1 +seres 1 +vivos 1 +nos 1 +merecemos 1 +morir 1 +underestimate 1 +Fill 1 +CE 1 +caverns 1 +stalactites 1 +stalagmites 1 +dinosaur 1 +footprints 1 +turtle 1 +fossils 1 +cave 1 +drawings 1 +precolumbian 1 +fortresses 1 +remarked 1 +Fiscal 1 +fragile 1 +chest 1 +liquidity 1 +gradually 1 +Latvian 1 +exploited 1 +large-screen 1 +Celesio 1 +Often 1 +counterfeiting 1 +3.2 1 +heels 1 +teenagers 1 +sufferers 1 +acne 1 +teens 1 +expense 1 +mentioned: 1 +earmarking 1 +SGH-P900 1 +quarter 1 +Cup 1 +handset 1 +handle 1 +DMB 1 +sub-Saharan 1 +chipper-shredder 1 +twigs 1 +bark 1 +brush 1 +ploughshare 1 +continually 1 +dinners 1 +lunches 1 +banquets 1 +León 1 +Merkel 1 +goodwill 1 +allegiance 1 +withdrawn 1 +endanger 1 +screenshots 1 +Bounty: 1 +Legend 1 +prominence 1 +manager 1 +line-up 1 +Top-Team 1 +Beijing 1 +4.2.0 1 +Voting 1 +(continuation) 1 +Rossa 1 +(A6-0030 1 +2004). 1 +ambition 1 +Anyone 1 +woke 1 +said: 1 +homosexual 1 +insane 1 +directly: 1 +54 1 +Dell 1 +vigorously 1 +OECD 1 +But: 1 +totally 1 +harmless 1 +expresses 1 +commonly 1 +finvenkistoj 1 +fina 1 +venko 1 +fringes 1 +just-completed 1 +Washington 1 +senior 1 +insurers 1 +handled 1 +DVB 1 +Lisbon: 1 +Christoph 1 +Schaaf 1 +chair 1 +Kabel 1 +cabler 1 +syste 1 +retailers 1 +pre-packaged 1 +straighter 1 +shots 1 +gravity 1 +hits 1 +(Internet 1 +Pricing 1 +Engine). 1 +LG 1 +dissociates 1 +Applecroft 1 +Bed 1 +Breakfast 1 +Carlyon 1 +Austell 1 +possess 1 +Shumen 1 +liberalised 1 +unheard-of 1 +jeopardise 1 +Vidal-Quadras 1 +Budapest 1 +Andr 1 +ssy 1 +(tel: 1 +5500 1 +(international 1 +timetables) 1 +5400 1 +(internal 1 +timetables); 1 +website: 1 +www.mav.hu). 
1 +Nearby 1 +Horse 1 +riding 1 +Camargaise 1 +races 1 +canoe 1 +Montpellier 1 +Massane 1 +Compostelle 1 +Domitienne 1 +castel 1 +visists 1 +exhibit 1 +relevance: 1 +ALLROUNDER 1 +370 1 +assembles 1 +toy 1 +buggy 1 +IMPORTANT 1 +NOTE: 1 +PAYPAL 1 +Orders 1 +Paypal 1 +Payment 1 +ONLY 1 +PSE 1 +Groups 1 +33 1 +noteworthy 1 +exceptions 1 +ozone 1 +Beneath 1 +below) 1 +Oporto 1 +parameter 1 +Silva 1 +Splendid 1 +internationalisation 1 +captions 1 +01 1 +Episteme 1 +Notable 1 +Saudi 1 +72 1 +Eurofighter 1 +planes 1 +BAE 1 +Professional 1 +Mobile 1 +(PMR) 1 +requested 1 +discretionary 1 +226 1 +Healthy 1 +People 1 +planet 1 +potency 1 +first-line 1 +regimens 1 +duration 1 +emergence 1 +Treasures 1 +trinkets 1 +trash-the 1 +relics 1 +forth 1 +Bär 1 +Kitzbühel 1 +Tyrol 1 +Ellmau 1 +Wilder 1 +Kaiser 1 +structured 1 +cooperation: 1 +reappearance 1 +novel 1 +Mercosour 1 +dreadful 1 +Water 1 +Framework 1 +Marine 1 +freshwater 1 +Q: 1 +CPU 1 +SWF 1 +communicate 1 +TCP 1 +parse 1 +Majorcan 1 +mansion 1 +transfigured 1 +artful 1 +haunted 1 +nationalistic 1 +jingoistic 1 +ghost 1 +quick 1 +offence 1 +egress 1 +self-contained 1 +orientates 1 +blinded 1 +electoral 1 +legitimacy 1 +transaction 1 +voluntarily 1 +buyer 1 +unifying 1 +thread 1 +suffocating 1 +paternalism 1 +underlies 1 +malaise 1 +subdomain 1 +Klosterbräu 1 +modernise 1 +jumping 1 +coloring 1 +chinese 1 +boxing 1 +diving 1 +Zerbe 1 +California 1 +Guard 1 +Reserve 1 +1960 1 +discharged 1 +1968 1 +St.Gallen 1 +Bad 1 +Ragaz 1 +Rorschach 1 +Altstätten 1 +SG 1 +Alt 1 +Johann 1 +investment: 1 +afford 1 +surfing 1 +researched 1 +circulate 1 +sling 1 +moto 1 +Pg 1 +Juan 1 +Borbo 1 +80-84 1 +Barceloneta) 1 +(+ 1 +93 1 +221 1 +70). 1 +arriving 1 +Balanchine 1 +ballet 1 +Lincoln 1 +Kirstein 1 +Edward 1 +M.M. 1 +(Galtür 1 +Tyrol), 1 +delicacies 1 +easing 1 +situation: 1 +burden-sharing 1 +fleeing 1 +m2 1 +Jazeera 1 +Aerbawip 1 +coral 1 +atolls 1 +multiplicity 1 +creatures 1 +intersting 1 +islands 1 +endemic 1 +merits 1 +Lithuania 1 +pressingly 1 +vulnerable 1 +politically 1 +Wonderful 1 +discharge 1 +EDF 1 +invoked 1 +begun 1 +stood 1 +alive 1 +Offering 1 +Tuberoza 1 +dependancy 1 +pacific 1 +Zakopane 1 +pendii 1 +ice-skate 1 +forecast 1 +Province 1 +Ravenna 1 +Istanbul 1 +stin 1 +poli 1 +acknowledges 1 +unnecessarily 1 +damaged 1 +Presidencies 1 +sleeves 1 +cultivating 1 +restarting 1 +Tours 1 +FREE 1 +MCM-City 1 +Tour 1 +stabilisation 1 +Specifies 1 +Diffie-Hellman 1 +dynamically-generated 1 +1797 1 +1832 1 +inventories 1 +boutique 1 +Opera 1 +Garnier 1 +Galeries 1 +Lafayette 1 +proximity 1 +Moulin 1 +Rouge 1 +Cabaret 1 +Montmartre 1 +refusing 1 +hub 1 +Proscar 1 +prostate 1 +indispensable 1 +Critically 1 +Ecological 1 +meaningfulness 1 +interpretability 1 +EAM 1 +Emotions 1 +passions 1 +comfortably 1 +donate 1 +‑ 1 +Morocco 1 +Maghreb 1 +Damuels 1 +richly 1 +endowed 1 +snow.First 1 +Rider 1 +sighted 1 +invite 1 +gastronomically 1 +again: 1 +Ultner 1 +lamb-weeks 1 +20.09. 
1 +05.10 1 +Exit 1 +tollbooth 1 +Imola 1 +Bagnara 1 +Romagna 1 +Founded 1 +1975 1 +Duesseldorf 1 +SB-Warenhaus 1 +METRO 1 +GROUP 1 +retail 1 +Extra 1 +arena 1 +Bernese 1 +alternate 1 +north-eastern 1 +north-western 1 +south-western 1 +coveted 1 +Brünig 1 +wreath 1 +noblemen 1 +timber 1 +persuaded 1 +Tsar 1 +resist 1 +Matured 1 +Speyside 1 +Single 1 +Malt 1 +oak 1 +barrels 1 +Port-Wine 1 +Casks 1 +29 1 +Working 1 +dramatically 1 +retention 1 +courtyard 1 +overlook 1 +panelists 1 +facilitates 1 +Beli 1 +Kamik 1 +pursued 1 +Bangemann 1 +Eurocoton 1 +complaint 1 +TACIS 1 +Booker 1 +Schnaps 1 +bottles 1 +intermediate 1 +invoices 1 +Alternatively 1 +50m2 1 +Peñiscola 1 +sea-views 1 +masturbate 1 +bladder 1 +peeing 1 +orgasm 1 +estimation 1 +coefficients 1 +supposing 1 +proportional 1 +hypothesis 1 +holds 1 +restricting 1 +deteriorate 1 +worsen 1 +repent 1 +sins 1 +unbelief 1 +blindness 1 +heed 1 +lest 1 +two-storey 1 +double-wing 1 +pseudo-Renaissance 1 +polygonal 1 +(e.g. 1 +gender-based 1 +gaps 1 +drop-outs 1 +devastating 1 +AIDS 1 +orphans 1 +children). 1 +19th 1 +demesne 1 +Rummerskirchs 1 +artificial 1 +pearl 1 +mussel 1 +château 1 +millrace; 1 +fry 1 +Holland 1 +subscribe 1 +Mechanism 1 +(ESM), 1 +backstop 1 +disturbances 1 +large-scale 1 +1991 1 +doubts 1 +Pro 1 +prints 1 +watermark 1 +achievements 1 +arctic 1 +impenetrable 1 +Telesina 1 +betting 1 +rounds 1 +internationally 1 +tax) 1 +cache 1 +query 1 +row 1 +prosper 1 +exceedingly 1 +multiply 1 +complimentary 1 +rewarding 1 +margin 1 +manoeuvre 1 +shine 1 +Bibbona 1 +Bolgheri 1 +Castagneto 1 +Carducci 1 +Castiglioncello 1 +Kaklamanis 1 +formularies 1 +preamble 1 +suspension 1 +grave 1 +cookies 1 +cobbled 1 +monster 1 +(EC) 1 +setbacks 1 +256 1 +decisive 1 +manipulate 1 +MathML 1 +specifications 1 +rendering 1 +mathematical 1 +equations 1 +TCS 1 +Special 1 +Anniversary 1 +Edition 1 +350 1 +Campings 1 +(Applause) 1 +platoon 1 +plaything 1 +Driving 1 +mit 1 +playful 1 +Drum 1 +n 1 +Bass 1 +loops 1 +forceful 1 +bass 1 +experimental 1 +synths 1 +5-star 1 +Literature 1 +script 1 +sixteenth 1 +on; 1 +Muhammad 1 +Awzal 1 +(ca 1 +Experience 1 +musicals 1 +Hamburg 1 +frequent 1 +Fourthly 1 +shocks 1 +asymmetry 1 +operative 1 +nurtured 1 +enduring 1 +relationships 1 +Fishing 1 +coastal 1 +contributes 1 +galvanisation 1 +fosters 1 +slow 1 +pendulum 1 +raised; 1 +fast 1 +lowered 1 +decision-taking 1 +positions; 1 +competing 1 +Moroder 1 +Donna 1 +disco 1 +mr 1 +Alfred 1 +Pernold 1 +OeSV 1 +ethical 1 +Fiori 1 +Hyatt 1 +Gold 1 +Passport 1 +reservations 1 +dairy 1 +allocated 1 +Democratic 1 +Congo 1 +Kensington 1 +77 1 +await 1 +U.E.F.A 1 +refereed 1 +referees 1 +AIA 1 +selling 1 +writted 1 +permission 1 +EROMM 1 +Steering 1 +clarification 1 +interpretations 1 +know-how 1 +contributing 1 +academy 1 +marketing-system 1 +vB-Marine 1 +advisors 1 +realizing 1 +runoff 1 +Glacier 1 +greatly 1 +meltwater 1 +safely 1 +refund 1 +purchaser 1 +minus 1 +Orange 1 +burnt 1 +Protestant 1 +Catholic 1 +Tavole 1 +Palatine 1 +B.C. 
1 +Doric 1 +greatness 1 +sofware 1 +MMORPG 1 +Winch 1 +Property 1 +Limited 1 +legally-required 1 +Contract 1 +0.75 1 +dismiss 1 +unwise 1 +utopian 1 +aspirations 1 +anxieties 1 +po-pod 1 +po 1 +pod 1 +intimate 1 +secluded 1 +Maldives 1 +backdrop 1 +Lastly 1 +tax: 1 +credible 1 +quorum 1 +resolutely 1 +politicised 1 +Constantinople 1 +liberated 1 +varying 1 +likelihood 1 +recession 1 +cancel 1 +rosier 1 +this: 1 +homework 1 +fueled 1 +4.9 1 +cent 1 +reflections 1 +plebeian 1 +sketched 1 +patricians 1 +plebeians 1 +resultant 1 +immunization 1 +Malaga 1 +10,000 1 +on-board 1 +BTSE 1 +Forces 1 +prudence 1 +reaffirm 1 +non-existent 1 +adopts 1 +partisan 1 +sunshine 1 +entertaining 1 +tranquil 1 +stepping 1 +Ramelleres 1 +Terraza 1 +prolonged 1 +exacerbating 1 +multilateral 1 +extending 1 +tuesday 1 +Runewalker 1 +brave 1 +duel 1 +21: 1 +30h 1 +CET 1 +Cogadh 1 +disarming 1 +militia 1 +attacked 1 +conventions 1 +receptions 1 +10-450 1 +Travelling 1 +Coordination 1 +Driven 1 +expanded 1 +AMC-Gent 1 +temporally 1 +spiritually 1 +Lord; 1 +manoeuver 1 +retreating 1 +subordinates 1 +reports: 1 +tactical 1 +Negative 1 +grip 1 +dismantle 1 +meets 1 +subjective 1 +ethnicity 1 +misinformed 1 +water-cooled 1 +two-cylinder 1 +tractor 1 +hp 1 +Steyr 1 +19.00 1 +Warm 1 +snacks 1 +succour 1 +guerrillas 1 +paramilitaries 1 +slight 1 +modification 1 +Anuncios 1 +Clasificados 1 +Gratuitos 1 +Compra 1 +Venta 1 +Regalos 1 +Contactos 1 +Bolsa 1 +Trabajo 1 +Motor 1 +Servicios 1 +Gratis 1 +issued 1 +DAAD 1 +Rectors 1 +Voice 1 +Universities 1 +universities 1 +colleges 1 +Soviet 1 +analyse 1 +milk 1 +apparently 1 +tensions 1 +transit 1 +Reenactment 1 +fascinating 1 +legends 1 +Island 1 +Elba 1 +widely 1 +lately: 1 +foremost 1 +Markets 1 +Scoreboard 1 +Anthea 1 +luncheon 1 +buffets 1 +lavish 1 +interaction 1 +Source 1 +securing 1 +AKTIV 1 +SENSITIV 1 +models 1 +finish 1 +sequence 1 +behave 1 +sub-heading 1 +PS3 1 +Store 1 +(PC 1 +version), 1 +tennis 1 +Venice 1 +Gritti 1 +donation 1 +abused 1 +conservation 1 +yacht 1 +Conveniently 1 +Guarulhos 1 +Panamby 1 +accommodations 1 +Suitable 1 +(6500 1 +ft) 1 +spools 1 +reels 1 +christened 1 +Tillich-Mulder 1 +Susanne 1 +Pandora 1 +applicants 1 +constituency 1 +(no 1 +debit 1 +cards). 
1 +Back 1 +Area 1 +Facebook 1 +broken 1 +isolation 1 +fake 1 +democracies 1 +lingered 1 +Rational 1 +Access 1 +(RPG 1 +OA) 1 +UIs 1 +RIA 1 +iPad 1 +Android 1 +smartphones 1 +Il 1 +Poggetto 1 +southwest 1 +Tuscan 1 +hills 1 +Gimignano 1 +Álvar 1 +Núñez 1 +Cabeza 1 +Vaca 1 +(c 1 +1490 1 +1559) 1 +explorer 1 +protoanthropological 1 +embarked 1 +Hughes 1 +Queen 1 +Numericable 1 +DANCE 1 +soundtrack 1 +trademark 1 +brethren 1 +Nephites; 1 +wherefore 1 +pour 1 +soul 1 +3% 1 +CO2 1 +criminally 1 +abducted 1 +Burma 1 +wrangling 1 +painfully 1 +extracted 1 +constituting 1 +Formerly 1 +drawbridge 1 +pulley 1 +repressive 1 +repression 1 +taxation 1 +savings 1 +Baby 1 +Brittannica 1 +Brittasgang 1 +rewarded 1 +res 1 +released 1 +redistributed 1 +warmest 1 +Bowis 1 +contribution: 1 +sentiments 1 +39 1 +89057168 1 +advise 1 +Rotondo 1 +Segelström 1 +Andersson 1 +Hedh 1 +Hedkvist 1 +Petersen 1 +Westlund 1 +replied 1 +wastes 1 +log 1 +password 1 +Families 1 +Bellariva 1 +discounts 1 +funfairs 1 +bathing 1 +establishments 1 +four-poster 1 +couples 1 +well-founded 1 +Sunworld 1 +Safaris 1 +4x4 1 +Specialist 1 +Kenya 1 +Ladies 1 +greet 1 +midst: 1 +Ignasi 1 +Guardans 1 +Daniel 1 +Caspary 1 +Béla 1 +Glattfelder 1 +Syed 1 +Kamall 1 +Sajjad 1 +Karim 1 +Erika 1 +Jan 1 +Tadeusz 1 +Masiel 1 +Mia 1 +Vits 1 +Sitia 1 +Agia 1 +Fotia 1 +haven 1 +5-6th 1 +Marathon 1 +expo 1 +struggle 1 +addiction 1 +intermediary 1 +Tamil 1 +Sinhalese 1 +demonstrating 1 +progressive 1 +ferruginous 1 +attraction 1 +Bresimo 1 +Booster 1 +Boards 1 +uncomplicated 1 +multiplying 1 +halls 1 +copying 1 +projectors 1 +Booking.com: 1 +Vip 1 +Executive 1 +Eden 1 +Aparthotel 1 +452 1 +Guest 1 +Brunetta 1 +thorough 1 +advances 1 +rapprochement 1 +Science 1 +Research 1 +license 1 +unfinished 1 +ridding 1 +evil 1 +man 1 +Unlike 1 +encoding 1 +algorithms 1 +base64 1 +uuencoding 1 +codegroup 1 +alphabet 1 +patience 1 +Exim 1 +separation 1 +recyclables 1 +(metals 1 +paper) 1 +non-recyclable 1 +landfill 1 +Boulanger 1 +pertinent 1 +insulation 1 +JSP 1 +foams 1 +durability 1 +beverage 1 +portable 1 +RV 1 +sculptor 1 +three-dimensional 1 +sculptures 1 +supercardioid 1 +directional 1 +first-order 1 +gradient 1 +transducer 1 +fermenting 1 +malt 1 +sugars 1 +starch 1 +brewhouse 1 +dioxide 1 +Ozone 1 +occurring 1 +symptomatic 1 +socialism 1 +opportunistic 1 +turncoats 1 +trample 1 +realization 1 +ethnic 1 +teleology 1 +raisons 1 +etat 1 +seaview 1 +1473 1 +Rostyl 1 +funeral 1 +disregarded 1 +underestimated 1 +Turn 1 +Col 1 +Raiser 1 +Sass 1 +Rigais 1 +Importance 1 +certificates 1 +Yours 1 +Ina 1 +fortress 1 +Photo 1 +Cairo 1 +Works 1 +Beirut 1 +Sharjah 1 +Biennial 1 +foundations 1 +Zemat 1 +TG 1 +attended 1 +SIGN 1 +ISTANBUL 1 +Leader 1 +embattled 1 +Islanders 1 +dance 1 +styles 1 +quadrille 1 +au 1 +commandement 1 +zouk 1 +zouk-love 1 +toumbélé 1 +biguine 1 +gwo 1 +ka 1 +downright 1 +objectionable 1 +dependence 1 +widened 1 +gains 1 +8.1% 1 +failing 1 +Conil 1 +Frontera 1 +Jaca 1 +dense 1 +traces 1 +manduca 1 +grows 1 +steering 1 +flip 1 +cab 1 +detects 1 +codec 1 +visualize 1 +120 1 +handicap 1 +wheelchair 1 +B: 1 +Qufu 1 +Train 1 +No.5 1 +PI 1 +FA 1 +SHANG 1 +CHENG 1 +No.1 1 +Yan 1 +Temple 1 +describe 1 +unsatisfactory 1 +biology 1 +VTE 1 +GNOME 1 +Terminal 1 +medicinal 1 +tradesman 1 +expanding 1 +Tina: 1 +MEDIA 1 +Plus 1 +geographic 1 +impressed 1 +2-week 1 +intensive-course 1 +‘ 1 +Basic 1 +Management 1 +Skills 1 +(BMS) 1 +’ 1 +ETH 1 +Prospecting 1 +yields 1 +gem 1 +Salam 1 +veteran 1 +economist 1 +prime 1 +stronger 1 +propose 1 +(red) 1 +Constantinus 1 
+prize 1 +JMover 1 +robbed 1 +(death) 1 +assigning 1 +(Ndera 1 +marrun 1 +nuk 1 +shpërblehet 1 +gja 1 +derdhun 1 +gjakut 1 +falun 1 +fisnikërisht 1 +beginnings 1 +Echo 1 +1.5 1 +Panshir 1 +Kabul 1 +100m 1 +volcanic 1 +Playa 1 +Jardin 1 +elevators 1 +intermediaries 1 +invisible 1 +flashing 1 +gestures 1 +Sahara 1 +crossover 1 +Currently 1 +EOS 1 +world-wide 1 +Martok 1 +follows 1 +launches 1 +assault 1 +Romulan 1 +renewed 1 +listens 1 +undertaking 1 +closes 1 +concentrates 1 +decrease 1 +Sogdians 1 +Chinggis 1 +Khan 1 +Buddhism 1 +boast 1 +Echternach 1 +Adjustment 1 +stepped 1 +pro-cyclical 1 +ports 1 +hinterland 1 +destroy 1 +endangered 1 +paradoxical 1 +re-establish 1 +Chiclana 1 +browsing 1 +PFOS 1 +PFOS-related 1 +fluorinated 1 +into; 1 +warrants 1 +extradition 1 +Treatment 1 +shoe 1 +Andreasen 1 +DIMDI 1 +Medical 1 +Documentation 1 +institute 1 +Health 1 +(BMG). 1 +rainfall 1 +north-east 1 +monsoon 1 +winds 1 +tempreature 1 +C. 1 +welcoming 1 +commission 1 +advising 1 +pardons 1 +unjustly 1 +convicted 1 +trickles 1 +jaguar 1 +facts 1 +Capri 1 +flavor 1 +knowledge-based 1 +repeat 1 +polar 1 +Neumayer 1 +impressively 1 +(like 1 +rivetting 1 +bolting) 1 +bonding 1 +ecosystem 1 +female 1 +worst 1 +part: 1 +Recycle 1 +Bin 1 +doesn 1 +deleted 1 +7200 1 +yards 1 +visible 1 +male 1 +long-term 1 +improvement 1 +prospects 1 +waded 1 +bull 1 +familiar 1 +Homely 1 +confortable 1 +comforts 1 +vice 1 +versa 1 +softening 1 +toys 1 +documentaries 1 +1860s 1 +evolved 1 +decadent 1 +drunk 1 +bite-size 1 +morsels 1 +delicately 1 +Hondurans 1 +respectful 1 +2% 1 +39.7 1 +Sitemap: 1 +detailled 1 +Etschtal 1 +1905 1 +Czechs 1 +Slovene 1 +user-base 1 +Andi 1 +Rasmus 1 +Zeev 1 +announce 1 +3.0 1 +successor 1 +halted 1 +Starcraft 1 +Canal 1 +esures 1 +outmost 1 +greeted 1 +sumptuous 1 +panelling 1 +antique 1 +veneered 1 +walls 1 +sea-shore 1 +skin 1 +Osuna 1 +Крім 1 +того 1 +ми 1 +рекомендуємо 1 +пограти 1 +також 1 +з 1 +Румунську 1 +Англійську 1 +уроками 1 +Minerva 1 +Biolabs 1 +craftspeople 1 +belief 1 +seeds 1 +sown 1 +Tsatsos 1 +flourishing 1 +Latour 1 +Maubourg 1 +boasts 1 +jars 1 +recyclable 1 +LEDON 1 +Lighting 1 +Zumtobel 1 +preferential 1 +viewpoint 1 +pluralist 1 +polemical 1 +colonial 1 +oppressor 1 +contradiction 1 +Isamar 1 +bracelet 1 +districts 1 +Odyssey 1 +Waterfront 1 +Enhancer 1 +improves 1 +sharpness 1 +blurred 1 +quarrels 1 +Kosovo 1 +Crespi 1 +ebook 1 +Flamenco 1 +accompany 1 +wellbeing 1 +home-grown 1 +allocate 1 +talented 1 +misleading 1 +Nationals 1 +embassy 1 +Addresses). 1 +well-trained 1 +vitality 1 +Oamos 1 +atrocity 1 +focuses 1 +Schiphol 1 +baggage 1 +drop-off 1 +5: 1 +00am 1 +Bees 1 +chain: 1 +bees 1 +beekeepers 1 +crops 1 +adversely 1 +Overtone 1 +Analyzer 1 +overtone 1 +expands 1 +feedback 1 +harmonics 1 +sung 1 +stitching 1 +PT 1 +Assembler 1 +pragmatic 1 +hasn 1 +Spiromat 1 +applet 1 +Foreword 1 +Barry 1 +Morse 1 +theatrical 1 +memoir 1 +Remember 1 +Advantages 1 +drug 1 +addicts 1 +kick 1 +vitro 1 +TiF4 1 +formation 1 +carious 1 +erosive 1 +enamel 1 +dentine 1 +lesions 1 +realities 1 +ignored 1 +moratorium 1 +53 1 +cruel 1 +inhuman 1 +punishment 1 +degenerative 1 +Alzheimer 1 +Parkinson 1 +rocketed 1
diff --git a/data/vocab-train.tgt b/data/vocab-train.tgt new file mode 100644 index 0000000000..c39540f817 --- /dev/null +++ b/data/vocab-train.tgt @@ -0,0 +1,15208 @@
+, 3676 +. 
2927 +die 1838 +und 1703 +der 1668 +in 895 +zu 681 +von 667 +den 666 +ist 583 +für 533 +mit 501 +das 460 +auf 430 +eine 400 +nicht 375 +im 365 +Sie 363 +dass 346 +werden 346 +des 343 +wir 334 +dem 326 +Die 314 +ein 307 +sich 304 +es 291 +sind 280 +auch 268 +an 237 +wird 227 +ich 227 +- 219 +um 204 +oder 191 +haben 190 +als 189 +Ich 180 +hat 175 +einer 175 +über 171 +" 166 +bei 161 +wie 159 +sie 158 +zur 157 +aus 155 +Das 151 +einen 151 +Wir 146 +" 145 +nach 141 +können 137 +einem 131 +! 124 +nur 124 +zum 120 +kann 115 +uns 112 +Der 109 +diese 109 +so 107 +Es 106 +Herr 106 +vor 106 +Kommission 104 +? 103 +aber 102 +wurde 101 +daß 101 +sehr 93 +dieser 91 +durch 90 +wenn 88 +noch 87 +diesem 84 +In 83 +müssen 83 +am 82 +alle 81 +möchte 76 +mehr 76 +sein 75 +Präsident 74 +war 74 +keine 72 +er 71 +man 70 +hier 70 +dieses 69 +gibt 69 +unter 68 +unsere 62 +eines 62 +vom 62 +/ 61 +ihre 60 +was 60 +Mitgliedstaaten 59 +Ihnen 58 +bis 57 +... 57 +Union 56 +sowie 55 +Hotel 55 +Europäischen 54 +Europa 53 +muss 51 +zwischen 51 +Bericht 50 +immer 50 +wurden 49 +anderen 48 +Parlament 47 +heute 47 +Frau 46 +Zeit 46 +mich 45 +Ihre 45 +darauf 44 +Im 44 +dann 44 +Wenn 43 +ihrer 43 +damit 43 +neuen 43 +diesen 43 +Ein 43 +Unternehmen 42 +Dies 42 +europäischen 41 +Diese 40 +sollten 40 +unserer 40 +EU 39 +zwei 39 +habe 39 +de 38 +Rat 37 +jedoch 37 +denen 37 +sondern 36 +machen 36 +bietet 36 +bin 36 +dies 36 +liegt 36 +jetzt 36 +Wie 36 +Mit 36 +wollen 35 +Entwicklung 35 +einige 35 +Frage 35 +viele 35 +neue 34 +Jahren 34 +Jahre 34 +da 33 +weil 33 +dazu 33 +mir 33 +gegen 32 +Herrn 32 +Für 32 +schon 32 +Und 32 +stehen 32 +geht 31 +meine 31 +sollte 31 +dabei 31 +Menschen 31 +ohne 30 +Bei 30 +wieder 30 +seine 30 +allem 30 +waren 30 +Jahr 29 +während 29 +ganz 29 +möglich 29 +seit 28 +beim 28 +unterstützen 28 +Eine 28 +deren 27 +Probleme 27 +selbst 27 +Kinder 27 +Art 27 +Lage 27 +andere 27 +Ländern 27 +Fragen 27 +finden 27 +seiner 27 +bereits 26 +Unterstützung 26 +welche 26 +Zusammenarbeit 26 +soll 26 +ab 26 +Bedeutung 26 +dafür 26 +vielen 26 +Welt 26 +Informationen 26 +würde 26 +Problem 25 +Maßnahmen 25 +also 25 +gut 25 +Europäische 25 +ersten 25 +befindet 24 +doch 24 +Thema 24 +Bürger 24 +ihren 24 +ob 24 +Auf 24 +Zimmer 24 +Daten 24 +' 24 +letzten 24 +sagen 23 +Seite 23 +Namen 23 +geben 23 +allen 23 +2 23 +führen 23 +einmal 23 +große 23 +Verfügung 23 +wäre 23 +Arbeit 23 +viel 23 +insbesondere 23 +unseren 22 +Rahmen 22 +daher 22 +Um 22 +muß 22 +wichtig 22 +darüber 22 +Länder 22 +Auch 22 +nun 22 +Sicherheit 21 +pro 21 +Kommissar 21 +Präsidentin 21 +politischen 21 +Von 21 +Weg 21 +ihr 21 +kommen 21 +Tag 21 +Beispiel 21 +bieten 21 +großen 21 +etwas 21 +eigenen 21 +Kollegen 20 +innerhalb 20 +10 20 +sowohl 20 +stellen 20 +Möglichkeit 20 +Fall 20 +worden 20 +tun 20 +Frauen 20 +Seiten 20 +ja 20 +Teil 20 +aller 20 +4 20 +entfernt 19 +seinen 19 +wissen 19 +hatte 19 +Ziel 19 +Land 19 +schnell 19 +Regierung 19 +Bereich 19 +Dank 19 +direkt 19 +besonders 19 +System 19 +aufgrund 19 +Haus 19 +internationalen 19 +Nach 18 +Uhr 18 +drei 18 +denn 18 +Kunden 18 +will 18 +ihn 18 +Situation 18 +Dieser 18 +einfach 18 +Dieses 18 +politische 18 +Meinung 18 +Aussprache 18 +Er 18 +verschiedenen 18 +allerdings 18 +& 18 +brauchen 18 +Ihr 18 +Alle 18 +erreichen 18 +Vorschlag 18 +nehmen 17 +weit 17 +Doch 17 +steht 17 +gestimmt 17 +sehen 17 +Weise 17 +Ihrem 17 +ihrem 17 +erhalten 17 +Parlaments 16 +würden 16 +etwa 16 +beiden 16 +alles 16 +Hier 16 +Lösung 16 +davon 16 +1 16 +Grund 16 +Liste 16 +Zugang 16 +Aber 16 +Als 16 +weiß 16 +Artikel 16 +deshalb 16 
+3 16 +5 16 +[ 16 +meiner 16 +ihnen 16 +weniger 16 +Restaurant 16 +Nähe 16 +Stadt 16 +Bitte 16 +bitte 16 +seinem 16 +gilt 16 +gehört 16 +nationalen 15 +Ziele 15 +wo 15 +Berichterstatter 15 +meinen 15 +2009 15 +deutlich 15 +wirklich 15 +Deshalb 15 +weiter 15 +gerade 15 +Schutz 15 +Zukunft 15 +natürlich 15 +15 15 +besteht 15 +wichtiger 15 +Produkte 15 +Abstimmung 15 +Personen 15 +lange 15 +Ende 15 +werde 15 +unser 15 +solche 15 +unserem 15 +hoffe 15 +Politik 15 +Zu 15 +daran 15 +Ihrer 15 +Rates 14 +nächsten 14 +gesagt 14 +dort 14 +oft 14 +Gemeinschaft 14 +betrifft 14 +Staaten 14 +Da 14 +Qualität 14 +Ort 14 +ermöglicht 14 +Person 14 +sogar 14 +Antwort 14 +Mitglieder 14 +2006 14 +erster 14 +Bezug 14 +europäische 14 +Abgeordneten 14 +gegenüber 14 +sollen 14 +Was 14 +Tatsache 14 +verwenden 14 +Gäste 14 +Vertrag 14 +danken 13 +Form 13 +gemacht 13 +Anfang 13 +jeden 13 +enthalten 13 +Zusammenhang 13 +all 13 +ebenfalls 13 +kein 13 +bringen 13 +schaffen 13 +2010 13 +hinsichtlich 13 +bekommen 13 +2008 13 +bereit 13 +weiteren 13 +Bürgern 13 +darf 13 +kleinen 13 +Verantwortung 13 +Strategie 13 +unterstützt 13 +hätte 13 +Europas 13 +erreicht 13 +du 13 +wirtschaftlichen 13 +USA 13 +gewährleisten 13 +bzw. 13 +stellt 13 +gerne 13 +könnte 13 +guten 12 +erfahren 12 +Unsere 12 +zwar 12 +Region 12 +entwickelt 12 +Bedingungen 12 +Millionen 12 +Version 12 +gute 12 +Punkt 12 +wichtige 12 +of 12 +la 12 +Reihe 12 +bestimmte 12 +klar 12 +wohl 12 +glaube 12 +Verfahren 12 +meisten 12 +wenig 12 +] 12 +gegeben 12 +Hotels 12 +gehen 12 +großer 12 +helfen 12 +bestimmten 12 +sprechen 12 +einigen 12 +zeigt 12 +gemäß 12 +Beitrag 12 +Beziehungen 12 +Lissabon 12 +� 12 +Besuch 12 +Ihren 12 +Abkommen 11 +Verhandlungen 11 +denke 11 +Kolleginnen 11 +Debatte 11 +völlig 11 +a 11 +mehrere 11 +genau 11 +keinen 11 +Rolle 11 +weiterhin 11 +Küche 11 +Hilfe 11 +Änderungsantrag 11 +Daher 11 +hinaus 11 +spielen 11 +Bad 11 +Anwendung 11 +So 11 +Herzen 11 +Interessen 11 +Dialog 11 +hatten 11 +bekannt 11 +| 11 +somit 11 +Hand 11 +möchten 11 +richtig 11 +Hinblick 11 +Einrichtungen 11 +Änderungsanträge 11 +Durch 11 +Schaffung 11 +europäischer 11 +China 11 +weitere 11 +Oktober 11 +Förderung 11 +indem 11 +erste 11 +weltweit 11 +Richtlinie 11 +Euro 11 +zusammen 10 +Tagen 10 +Leben 10 +Mai 10 +Mitarbeiter 10 +Außerdem 10 +Bestimmungen 10 +ebenso 10 +Bereichen 10 +schriftlich 10 +jedem 10 +vielleicht 10 +genießen 10 +Blick 10 +Verwendung 10 +internationale 10 +wirtschaftliche 10 +geführt 10 +Platz 10 +notwendig 10 +geschlossen 10 +Internet 10 +verfügen 10 +eigentlich 10 +führt 10 +nämlich 10 +25 10 +Geld 10 +Einsatz 10 +Auswirkungen 10 +findet 10 +Gruppe 10 +Markt 10 +Berlin 10 +gestellt 10 +Elemente 10 +lassen 10 +besser 10 +Verfassung 10 +Fraktion 10 +Stelle 10 +bleiben 10 +konzentrieren 10 +per 10 +morgen 10 +jeder 10 +Rechte 10 +2000 10 +Aussicht 10 +könnten 10 +Wirtschaft 10 +Änderung 10 +Nutzung 10 +Menschenrechte 10 +Zum 10 +€ 10 +beitragen 10 +gesamte 10 +GmbH 10 +Entschließung 10 +arbeiten 10 +öffentlichen 10 +Aus 10 +Recht 10 +hervorragende 10 +Regeln 10 +Vorschläge 10 +Deutschland 10 +Unser 10 +scheint 10 +nutzen 10 +Mal 10 +Man 10 +Richtung 10 +Am 10 +Druck 10 +Minuten 10 +bestimmt 9 +besten 9 +versucht 9 +Wachstum 9 +Binnenmarkt 9 +schließlich 9 +gleichzeitig 9 +Behörden 9 +überhaupt 9 +Falls 9 +halten 9 +Osten 9 +Homepage 9 +Produktion 9 +Seit 9 +Tage 9 +dar 9 +leben 9 +fördern 9 +Ebene 9 +Umgebung 9 +sozialen 9 +Gewalt 9 +dürfen 9 +macht 9 +äußerst 9 +eingesetzt 9 +bestehen 9 +besuchen 9 +kommt 9 +Damit 9 +Krise 9 +Website 9 +Darüber 9 
+sagte 9 +lässt 9 +Während 9 +Kosten 9 +Bild 9 +Angebot 9 +Zentrum 9 +gebracht 9 +dessen 9 +darin 9 +Bürgerinnen 9 +Funktion 9 +unten 9 +0 9 +freue 9 +gab 9 +mehreren 9 +kleine 9 +rasch 9 +vergessen 9 +Mitglied 9 +hinter 9 +ca. 9 +Firma 9 +Türkei 9 +ermöglichen 9 +fast 9 +nahe 9 +Terrasse 9 +Russland 9 +erforderlich 9 +Kontrolle 9 +ganze 9 +erst 9 +darum 9 +Aufmerksamkeit 9 +Erfahrung 9 +Gesellschaft 9 +verpflichtet 9 +ausgestattet 9 +verschiedene 9 +Verbraucher 9 +benutzt 9 +zusätzliche 9 +Bedürfnisse 9 +solchen 9 +Falle 8 +Damen 8 +erwarten 8 +Paris 8 +Schritt 8 +bessere 8 +beginnt 8 +30 8 +Einige 8 +2007 8 +_ 8 +Dabei 8 +Dokument 8 +Wer 8 +schwer 8 +12 8 +obwohl 8 +Verbesserung 8 +Position 8 +Gelegenheit 8 +leider 8 +Jahres 8 +Gebiet 8 +Sport 8 +hohen 8 +Ergebnis 8 +kostenlos 8 +verbundenen 8 +Meiner 8 +Mittel 8 +Nutzen 8 +Fehler 8 +verantwortlich 8 +Umsetzung 8 +Bildung 8 +Kraft 8 +Ergebnisse 8 +begrüße 8 +Sache 8 +50 8 +Linie 8 +sei 8 +Meer 8 +Programm 8 +Open 8 +kurz 8 +zahlreiche 8 +8 8 +erwähnt 8 +Entscheidung 8 +Notwendigkeit 8 +gelegen 8 +geleistet 8 +zweiten 8 +erwartet 8 +Geschichte 8 +Anerkennung 8 +.. 8 +Schlusselwortern 8 +Site 8 +schützen 8 +französischen 8 +Angaben 8 +online 8 +neu 8 +möglicherweise 8 +Suche 8 +Einkommen 8 +unterschiedliche 8 +meinem 8 +Nur 8 +anderes 8 +befinden 8 +hin 8 +statt 8 +zugleich 8 +liegen 8 +handelt 8 +Insel 8 +nichts 8 +Handel 8 +zudem 8 +mindestens 8 +Verständnis 8 +umgesetzt 8 +Natürlich 8 +unbedingt 8 +berücksichtigen 8 +gesamten 8 +jedes 8 +Juli 8 +tatsächlich 8 +Schwierigkeiten 8 +verbessern 8 +Initiative 8 +sicher 8 +Aktivitäten 8 +konnte 8 +16 8 +Hauses 8 +Bilder 8 +gefordert 8 +Zahl 8 +EUR 8 +Inhalt 7 +Herren 7 +jeweiligen 7 +Tagesordnung 7 +versuchen 7 +200 7 +bedeutet 7 +geworden 7 +Hinsicht 7 +by 7 +Frankreich 7 +meines 7 +wichtigsten 7 +liebe 7 +18 7 +Vertrauen 7 +akzeptieren 7 +vier 7 +Kooperation 7 +neuer 7 +diejenigen 7 +Atmosphäre 7 +genannten 7 +Bis 7 +jüngsten 7 +zeigen 7 +Forderungen 7 +Interesse 7 +Produkten 7 +suchen 7 +beachten 7 +betroffenen 7 +Änderungen 7 +Teilen 7 +Punkte 7 +ausdrücklich 7 +Verbindung 7 +durchgeführt 7 +Erklärung 7 +prüfen 7 +voll 7 +wobei 7 +Sommer 7 +Verordnung 7 +öffentliche 7 +Restaurants 7 +Tagung 7 +entschieden 7 +gemeinsame 7 +befassen 7 +mittels 7 +d 7 ++ 7 +nimmt 7 +Entwicklungen 7 +konnten 7 +Arbeitsplätze 7 +aktiv 7 +Milliarden 7 +richtigen 7 +13 7 +Verwaltung 7 +sicherzustellen 7 +bleibt 7 +Amerika 7 +insgesamt 7 +vollständig 7 +warten 7 +sagt 7 +sorgen 7 +besondere 7 +Dienstleistungen 7 +Strategien 7 +Ausbildung 7 +Behandlung 7 +2005 7 +Wegen 7 +Institutionen 7 +m 7 +ehemaligen 7 +einzelnen 7 +Wasser 7 +Einführung 7 +Anfragen 7 +letzte 7 +Angebote 7 +Klicken 7 +entsprechenden 7 +Ansicht 7 +Fällen 7 +Gott 7 +neues 7 +aussprechen 7 +Zur 7 +Märkte 7 +legen 7 +dringend 7 +Polen 7 +Präsidentschaft 7 +Jeder 7 +Wert 7 +vorgeschlagen 7 +wollte 7 +Foto 7 +komplett 7 +begann 7 +ins 7 +Wort 7 +Frieden 7 +Geschäftsordnung 7 +Woche 7 +zweifellos 7 +Vertrags 7 +Vor 7 +danke 7 +Projekt 7 +gestern 7 +Software 7 +AG 7 +verbunden 7 +starten 7 +zunächst 7 +verbringen 7 +überzeugt 7 +begrüßen 7 +en 7 +ergreifen 7 +Grundlage 7 +Regierungskonferenz 7 +Integration 7 +di 7 +s 7 +verhindern 7 +größten 7 +zufrieden 7 +Grenzen 6 +Angelegenheit 6 +Gästen 6 +Fortschritte 6 +Effizienz 6 +usw. 
6 +zuletzt 6 +vertreten 6 +aufnehmen 6 +lokalen 6 +Spiel 6 +durchaus 6 +verstehen 6 +fahren 6 +Information 6 +Grunde 6 +verwendet 6 +Tat 6 +Natur 6 +ideal 6 +hervorgehoben 6 +(EN) 6 +vergangenen 6 +Nahen 6 +mein 6 +Den 6 +verwende 6 +Installation 6 +Definition 6 +betonen 6 +erlaubt 6 +entwickeln 6 +à 6 +Gebiete 6 +Fischerei 6 +dagegen 6 +gehören 6 +erfüllt 6 +11 6 +aufgenommen 6 +entsprechend 6 +hohe 6 +korrekt 6 +Folgen 6 +Demokratie 6 +wahrscheinlich 6 +anderer 6 +Wichtig 6 +neben 6 +Projekte 6 +Wann 6 +bevor 6 +eher 6 +deutsche 6 +Mitteln 6 +privaten 6 +Dusche 6 +Prozent 6 +zurück 6 +Wettbewerb 6 +Grand 6 +gegründet 6 +mag 6 +Annahme 6 +Schlüssel 6 +beobachten 6 +gezeigt 6 +Aspekte 6 +Management 6 +leicht 6 +Landwirtschaft 6 +Kultur 6 +An 6 +Lassen 6 +Umwelt 6 +nennen 6 +umfassende 6 +Experten 6 +unterschiedlichen 6 +14 6 +anzeigen 6 +jede 6 +Minister 6 +ändern 6 +vorgenommen 6 +Abschließend 6 +24 6 +teilweise 6 +her 6 +hoch 6 +Technologien 6 +einig 6 +Dienste 6 +Juni 6 +nationale 6 +riesigen 6 +Frühstücksbuffet 6 +Betrieb 6 +Wunsch 6 +stark 6 +deutschen 6 +geöffnet 6 +Eltern 6 +Veranstaltungen 6 +rund 6 +Preis 6 +Rechtsvorschriften 6 +Ruhe 6 +derzeit 6 +27 6 +1. 6 +Ist 6 +Dazu 6 +treten 6 +Leute 6 +New 6 +wenige 6 +Kandidaten 6 +Solche 6 +nie 6 +beginnen 6 +vieler 6 +Sollten 6 +modernen 6 +® 6 +Norden 6 +Stellen 6 +Haltung 6 +genug 6 +Systems 6 +Waffen 6 +gar 6 +vorgesehen 6 +Spieler 6 +Berichterstatterin 6 +November 6 +speziellen 6 +relativ 6 +Kind 6 +genutzt 6 +Worte 6 +Dr. 6 +sobald 6 +Großteil 6 +Lösungen 6 +40 6 +Parteien 6 +Idee 6 +Bestandteil 6 +Solidarität 6 +größere 6 +Investitionen 6 +Willen 6 +erfolgreichen 6 +17 6 +™ 6 +Möglichkeiten 6 +betroffen 6 +ruhigen 6 +angeboten 6 +2003 6 +Stabilität 6 +De 6 +fest 6 +Drittländern 6 +Reformen 6 +freien 6 +zahlen 6 +angenommen 6 +Öffentlichkeit 6 +Vorschriften 6 +Sein 6 +technischen 6 +Japan 6 +anderem 6 +enthält 6 +Regierungen 6 +Neben 6 +tragen 6 +20 6 +wichtigen 6 +Medien 6 +lösen 6 +schlecht 6 +Anzahl 6 +Allerdings 6 +kaum 6 +Computer 6 +gewinnen 6 +bisher 6 +del 6 +Stunden 6 +Flughafen 6 +entscheiden 6 +sieht 6 +and 6 +Aufnahme 6 +Gästebewertungen 6 +Ausschuss 6 +offene 6 +verfügt 6 +wenigen 6 +gekommen 6 +geschaffen 6 +Diskriminierung 6 +interessante 6 +entspannen 6 +gleichen 6 +gewährt 6 +spiele 6 +Zweck 5 +angezeigt 5 +Aktionen 5 +außer 5 +Vertreter 5 +gleich 5 +Ausschuß 5 +Werkzeuge 5 +schönen 5 +Landschaft 5 +Gefahr 5 +Organisation 5 +Einfluss 5 +seien 5 +Erweiterung 5 +Technik 5 +Kompromiss 5 +Standpunkt 5 +This 5 +page 5 +last 5 +modified 5 +Republik 5 +unterrichten 5 +Gesundheit 5 +setzen 5 +begeben 5 +Erachtens 5 +unterstütze 5 +Energie 5 +Zeitraum 5 +aktuelle 5 +dritte 5 +Kampf 5 +Vereinigten 5 +Unter 5 +Österreich 5 +einverstanden 5 +Herausforderung 5 +Bar 5 +Verbrechen 5 +angeht 5 +Meter 5 +benötigen 5 +oben 5 +garantiert 5 +Dann 5 +ernsthaft 5 +regionale 5 +kümmern 5 +solch 5 +notwendigen 5 +Koordinierung 5 +zuerst 5 +angewandt 5 +Diskussion 5 +agieren 5 +keiner 5 +entsprechende 5 +sorgfältig 5 +geprüft 5 +Kontakt 5 +politischer 5 +WLAN 5 +weiteres 5 +Ratspräsident 5 +strengen 5 +Feld 5 +Ausdruck 5 +Programme 5 +Insbesondere 5 +A 5 +täglich 5 +betreut 5 +Urlaub 5 +Banken 5 +Spanien 5 +beschränken 5 +500 5 +km 5 +d. 5 +h. 
5 +Kollegin 5 +ging 5 +Protokoll 5 +keinerlei 5 +alten 5 +verfügbar 5 +Dezember 5 +Forschung 5 +aktuellen 5 +Herausforderungen 5 +verurteilt 5 +un 5 +genannt 5 +wegen 5 +Jetzt 5 +bitten 5 +finanzielle 5 +außerordentlich 5 +erstellt 5 +Garten 5 +kürzlich 5 +Einigung 5 +integriert 5 +Sektoren 5 +X 5 +Wahl 5 +Verhalten 5 +Gipfel 5 +übernachten 5 +Licht 5 +Regelung 5 +wesentlich 5 +400 5 +Angelegenheiten 5 +Dennoch 5 +stets 5 +Ehre 5 +endlich 5 +Dienst 5 +Unterschiede 5 +Hersteller 5 +garantieren 5 +erfolgreich 5 +hingewiesen 5 +soziale 5 +9. 5 +einiger 5 +erfüllen 5 +aufgeführt 5 +Regelungen 5 +Stil 5 +Problems 5 +Speisen 5 +800 5 +spricht 5 +vollkommen 5 +Ratschlag: 5 +aufgezahlt 5 +Finde 5 +00 5 +zentrale 5 +je 5 +Verfügbarkeit 5 +denken 5 +gelten 5 +Mitteilung 5 +fehlt 5 +Gleichgewicht 5 +finde 5 +dank 5 +häufig 5 +Monate 5 +ganzen 5 +Staat 5 +geboten 5 +unternehmen 5 +Power 5 +Jahrhundert 5 +York 5 +bald 5 +Wissenschaft 5 +et 5 +(PL) 5 +7 5 +Garantie 5 +außerdem 5 +Börse 5 +hinweisen 5 +entspricht 5 +bestimmter 5 +Berichte 5 +Willkommen 5 +großes 5 +Praxis 5 +St. 5 +22 5 +beträgt 5 +Familie 5 +Technologie 5 +Sinne 5 +unseres 5 +z.B. 5 +frei 5 +berücksichtigt 5 +inzwischen 5 +Vorhaben 5 +26 5 +Familien 5 +Server 5 +größeres 5 +Anteil 5 +Prozess 5 +Gebäude 5 +fordern 5 +Videos 5 +genauso 5 +dient 5 +Absatz 5 +vorgeschlagenen 5 +Darstellung 5 +eröffnet 5 +The 5 +Landes 5 +größte 5 +Zustimmung 5 +Zeitpunkt 5 +Februar 5 +Macht 5 +angesprochen 5 +kleiner 5 +Nicht 5 +Bevölkerung 5 +laut 5 +Umfeld 5 +Team 5 +Du 5 +praktisch 5 +Themen 5 +gesetzt 5 +6 5 +verbessert 5 +angemessen 5 +Web 5 +nachdem 5 +beispielsweise 5 +Zunächst 5 +Rede 5 +wünschen 5 +Club 5 +kaufen 5 +Aufgabe 5 +Bau 5 +gegenwärtigen 5 +Austausch 5 +Nun 5 +Verbot 5 +hält 5 +verbreitet 5 +einzige 5 +Empfang 5 +Prozesse 5 +Ausgaben 5 +beglückwünschen 5 +betrachtet 5 +entsprechen 5 +1999 5 +Veränderungen 5 +ferner 5 +Bedarf 5 +langen 5 +Wirkung 5 +lang 5 +Bruder 5 +Werbung 5 +eingeführt 5 +Costa 5 +Anlage 5 +Überprüfung 5 +ausreichend 5 +Park 5 +ihm 5 +Vielzahl 5 +Analyse 5 +wählen 5 +Regierungschefs 5 +verlieren 5 +hauptsächlich 5 +angeben 5 +Euch 5 +verfolgt 5 +Hoffnung 5 +zulassen 5 +Abschluss 5 +B. 
[data hunk: adds a frequency-sorted German vocabulary fixture, one `+<token> <count>` diff line per entry; the per-line layout was flattened into run-on text during extraction. Restored format, first and last entries shown:]
+Erbe 5
+fünf 5
+erinnern 5
+Entwurf 5
[... several thousand further `+<token> <count>` entries, counts descending from 5 to 1 ...]
+berkutschi 1
+skijumping 1
+Grrrr 1
+25.
1 +ungenaue 1 +Nichtübermittlung 1 +cosmetic 1 +surgery 1 +30.04.2009 1 +Ulm 1 +Neu-Ulm 1 +Naziaufmarschs 1 +Oberverwaltungsgericht 1 +endgültig 1 +gescheitert 1 +Werbespots 1 +französisch-schweizerischen 1 +bewaldeten 1 +Genf 1 +kosmopolitischen 1 +Kulturzentrum 1 +Unterdrückung 1 +ineffiziente 1 +korrupte 1 +Mugabe 1 +Schikanierung 1 +Gegnern 1 +Gewerkschaftsaktivisten 1 +Farmern 1 +manche 1 +gewünscht 1 +Eins 1 +Schwimmbädern 1 +sommers-winters 1 +Thermalwasser 1 +Firmengruppe 1 +Companies 1 +distribuiert 1 +Axialventilatoren 1 +industrielle 1 +agrarische 1 +Markte 1 +Einmischung 1 +realistisch 1 +nützlich 1 +Energiedialoge 1 +Generell 1 +Multivitaminsäfte 1 +zwölf 1 +Fruchtsorten 1 +Festpreisen 1 +Katalogen 1 +freibleibend 1 +Bezahlung 1 +fällig 1 +ausreichende 1 +Kreditwürdigkeit 1 +problemlose 1 +Münzsystems 1 +seitens 1 +Nutzer 1 +Unterscheidbarkeit 1 +Münzen 1 +visueller 1 +tastbarer 1 +Teamarbeit 1 +Rechtssachverständigen 1 +Exportkontrollverantwortlichen 1 +konsistente 1 +Hauptbestandteil 1 +Abwrackfonds 1 +Stromkreise 1 +Leuchtdioden- 1 +anzeige 1 +Wippschaltern 1 +zusätzlichen 1 +A. 1 +Voltmeter 1 +Umschalter 1 +Drehspulmesswerk 1 +hektische 1 +Wählen 1 +Symbol-Schema 1 +gelöscht 1 +Schaltfläche 1 +raschen 1 +Bangkok 1 +Vertiefung 1 +Dezentralisierung 1 +Fluss-kontrollierten 1 +ISO 1 +9001 1 +Montage 1 +Inhaftierten 1 +anderswo 1 +gefoltert 1 +Verkehrszeichen 1 +Louis 1 +Berger 1 +durchgefuehrt 1 +EBRD 1 +bevollmaechtigt 1 +war). 1 +bemerken 1 +einigem 1 +niederländischen 1 +CDA 1 +vorgeschlagene 1 +Tabakwerbung 1 +Bestelldaten 1 +Sicherheitsgründen 1 +Hochgurgl 1 +3000-er 1 +Erlebnis 1 +grandiosen 1 +unbeliebtere 1 +Argentinien 1 +einstellte 1 +Energieministerium 1 +PV-Industrie 1 +168,3 1 +Neuregulierung 1 +Risikoüberwachung 1 +Umso 1 +stabilisieren 1 +relative 1 +eingekehrt 1 +Tanusevcí 1 +vorübergehend 1 +UCK 1 +Nokia 1 +Siemens 1 +Networks 1 +Adventure-based 1 +Learning 1 +evangelistischen 1 +Broschüren 1 +Bibeln 1 +Wortspielereien 1 +Osterurlaub 1 +Bibione 1 +sichergestellt 1 +bewußt 1 +diplomatische 1 +konsularische 1 +Heimatstaat 1 +Delingua 1 +Finnland 1 +ansässiges 1 +Übersetzungen 1 +Dolmetschdienste 1 +Sprachunterricht 1 +sekundäre 1 +beschaffen 1 +feststelle 1 +Nummern 1 +Junge 1 +Heiligabend 1 +Offenheit 1 +· 1 +zugesicherten 1 +Warn- 1 +Entscheidungszeiten 1 +Abfeuern 1 +nuklear 1 +Raketengeschossen 1 +Freie 1 +Formate 1 +eigener 1 +islamischen 1 +Ostseeregion 1 +Nördlichen 1 +ausreicht 1 +anzusprechen 1 +abschließend 1 +Thongpaseuth 1 +Keuakoun 1 +Kamphouvieng 1 +Sisaath 1 +Seng-Aloun 1 +Phenphanh 1 +Bouavanh 1 +Chanhmanivong 1 +Keochay 1 +inhaftiert 1 +Vientiane 1 +friedlich 1 +demonstriert 1 +aufgerufen 1 +entbrannte 1 +Hochelfen 1 +wofür 1 +T-Energie 1 +internationaler 1 +Nachfolgekonferenz 1 +Weltgipfel 1 +langsame 1 +unzulängliche 1 +schweigen 1 +Unterschlagungen 1 +unzureichenden 1 +Projektdurchführung 1 +Beitritts 1 +EWU 1 +Wartezeit 1 +marschierten 1 +ein; 1 +Preußen 1 +Kontribution 1 +Napoléon 1 +marschierte 1 +russischen 1 +Zar 1 +Alexander 1 +Mächten 1 +aufteilte 1 +Römischen 1 +Verträgen 1 +Wettbewerbspolitik 1 +MGI 1 +CompanyScope 1 +Umsätzen 1 +Milliarde 1 +Gesundheitsschutz 1 +oftmals 1 +Schwäche 1 +nachvollziehbaren 1 +Kaufpreises 1 +verzögert 1 +Kaufpreis 1 +Frist 1 +Woche) 1 +Inselregionen 1 +Aram 1 +Dikiciyan 1 +150 1 +verstärkten 1 +Patentschutzes 1 +motivieren 1 +Gruppenmitglieder 1 +Schlachtzugsmitglieder 1 +zugegangen 1 +verifizieren 1 +wonach 1 +Maus 1 +kitten 1 +Dokumente 1 +Verallgemeinerung 1 +Unterrichtung 1 +Wasserqualität 1 
+einheitliches 1 +Warnsystem 1 +Symbolen 1 +Steuererhöhung 1 +senken 1 +Eifer 1 +Neubekehrten 1 +stört 1 +Schlafen 1 +Decken 1 +Blau 1 +Centre 1 +Benzol 1 +Karzinogenen 1 +ergriffen 1 +Exmoor 1 +Lebensraum 1 +Wildtiere 1 +Paradies 1 +Wanderer 1 +Besichtigen 1 +Kunsthandwerkszentrum 1 +Alcoa-Broschüre 1 +Technische 1 +Alcoa 1 +Wheel 1 +Products 1 +Europe 1 +taido 1 +Artfacts.Net 1 +Internetseite 1 +Kunstgeschehen 1 +widerzuspiegeln 1 +Kunstmarkt 1 +Aufgrund 1 +Implementierung 1 +öffentlich-private 1 +ausgewogenen 1 +größerer 1 +schnellstmöglichen 1 +verhängten 1 +Zusätze 1 +Mörtels 1 +Zusatzes 1 +aufheben 1 +T-Shirts 1 +Andenken 1 +einfarbige 1 +Wei 1 +szlig 1 +Innenausstattung 1 +wenden 1 +AKVIS 1 +Coloriage 1 +Objekt 1 +Pratt 1 +Whitney 1 +Canada 1 +PT6A-25 1 +T-34A 1 +Kolbenmotor 1 +US 1 +Force 1 +Bettwäsche 1 +mieten 1 +(NOK 1 +50). 1 +geholfen 1 +Kostete 1 +voraussichtlich 1 +peinlich 1 +anzugeben 1 +vermögen 1 +Sowohl 1 +zivilen 1 +milit 1 +rischen 1 +Luftfahrtbereich 1 +exzellente 1 +Reputation 1 +ständige 1 +Präsenz 1 +Ministerin 1 +Lehtomäki 1 +kurzes 1 +Par 1 +Birdie-Chance 1 +Rettung 1 +Scores 1 +Continental 1 +Plymo 1 +saugen 1 +fressen 1 +Flying 1 +Foxes 1 +heilig 1 +Delikatesse 1 +gegrillt 1 +elf 1 +Verhandlungsrunden 1 +Wellen 1 +geschlagen 1 +Turbulenzen 1 +Muttergesellschaften 1 +SWX 1 +Beteiligungen 1 +(85% 1 +15%). 1 +Ökonomen 1 +Rüdiger 1 +Fahlenbrach 1 +René 1 +Stulz 1 +Annahmen 1 +2006-2008 1 +CEOs 1 +Finanzinstituten 1 +überprüft 1 +Kittelmann 1 +anstreben 1 +abzeichnet 1 +Schlichtung 1 +wiederaufgenommen 1 +Türme 1 +104 1 +Gewölbe 1 +Mittelschiff 1 +Basislager 1 +Banken- 1 +Versicherungsnetzes 1 +aufsichtsrechtlichen 1 +Vientos 1 +Geoff 1 +Muldaur 1 +Pelham 1 +älterer 1 +traditionellem 1 +Orleans-Jazz 1 +Doo 1 +Wop-Quartettgesang 1 +Schrei 1 +dockbar 1 +2.3 1 +Docks 1 +Dialoge 1 +Angeles 1 +Seattle 1 +Sydney: 1 +OU 1 +Stimmung 1 +depressiv 1 +Modernisierungsverlierer 1 +Gruppen: 1 +äusserlichen 1 +illuminierten 1 +Werbeschilder 1 +(3) 1 +Gut 1 +GUE 1 +NGL 1 +Bewirtschaftungsplan 1 +unsinnig 1 +separate 1 +gelungenen 1 +Kompromisses 1 +Gehalt 1 +Berechnung 1 +Beispielsweise 1 +Kriminalität 1 +wünschenswert 1 +östlichen 1 +Beitrittskandidaten 1 +Europol 1 +angehören 1 +fabelhaften 1 +Strategiespiel 1 +überleben 1 +RWS-Büchsenpatronen 1 +beispiellosen 1 +Siegeszug 1 +angetreten 1 +Jägern 1 +geschätzt 1 +Sportschützen 1 +tiefen 1 +Eingriff 1 +Village 1 +Garden 1 +Paradiso 1 +Lebensstil 1 +boomenden 1 +Russlands 1 +entscheidendem 1 +Landessprache 1 +Skripten 1 +Folien 1 +Elektronische 1 +CDs 1 +PORTAL 1 +ÖPNV 1 +Las 1 +Marinas 1 +Resort 1 +unvergleichlichen 1 +exklusivsten 1 +Gegenden 1 +Lanzarote 1 +residentialen 1 +Teguise; 1 +unvergesslichen 1 +erlebnissreichen 1 +Aussehen 1 +Nachttöter- 1 +Rechtbringer- 1 +Bestienjäger-Sets 1 +überarbeitet 1 +Logik 1 +Steuerbetrugs 1 +Neoliberalismus 1 +Scheitern 1 +Millenniums-Entwicklungsziele 1 +bescheiden 1 +Betrugs 1 +belangt 1 +vorübergehenden 1 +Nichtverfügbarkeit 1 +Zyanidtechniken 1 +Bergbau 1 +Umweltschäden 1 +Bedrohung 1 +Flexibilisierung 1 +Hammer 1 +Entry 1 +Insiderpapiere 1 +Insidergesetzgebung 1 +Marktmissbrauch 1 +BaFin 1 +beaufsichtigt 1 +WarenkorbSie 1 +produkte 1 +Einkaufswagen 1 +bezahle 1 +anfallenden 1 +Betrag 1 +befürworteten 1 +sichern 1 +Neuerungen 1 +qualifizierten 1 +Esslöffel 1 +globales 1 +SEC 1 +Ultraweitwinkelobjektivs 1 +verzerrt 1 +Austrian 1 +Fluglinie 1 +Zentral- 1 +Geburtsdatum 1 +Geburtsort 1 +Machauts 1 +weckte 1 +Automantenwolf 1 +AU 1 +Neuentwickelt 1 +Zerkleinerung 1 +gefrorenen 1 
+Fleischblöcken 1 +Frischfleisch 1 +angefertigt 1 +Grundgedanke 1 +Semesters 1 +Ex-ante-Koordinierung 1 +schmackhafte 1 +10: 1 +Zimmerrate 1 +inkludiert 1 +MGM 1 +HOSTEL 1 +angenehmen 1 +Zagreb 1 +Nike 1 +schaltete 1 +Reklamen 1 +Bryant 1 +eigenes 1 +Markenzeichen 1 +Fremdenverkehr 1 +Wijsenbeek 1 +charakteristisch 1 +durchdachten 1 +überlegten 1 +erheblich 1 +Fußgängern 1 +Fahrzeugführer 1 +Fahrzeuginsassen 1 +Terra 1 +Select 1 +Co.KG 1 +benutzerfreundlichem 1 +Mussolini 1 +Alimentationen 1 +M. 1 +peinlichen 1 +Auftritte 1 +Dalsers 1 +(kirchlich) 1 +Ehe 1 +beruhen 1 +Standardprodukt 1 +Systemverfügbarkeit 1 +Zuruf 1 +Großrohren 1 +Durchmesser 1 +2.000 1 +mm 1 +Summary 1 +(rechts 1 +Balloon 1 +Variablen 1 +aufgelistet 1 +HTML-Code 1 +tschechische 1 +Barcelona-Ziele 1 +machbare 1 +anstrebt 1 +gebührendem 1 +Interessengruppen 1 +Ballon-Tips 1 +Delphi 1 +Anwednung 1 +Haushaltsplans 1 +detaillierte 1 +Einseitige 1 +Ergänzungen 1 +unwirksam 1 +LateRooms 1 +Komplettpreise 1 +Brilgar 1 +Conrad 1 +Ce 1 +moment 1 +restera 1 +jamais 1 +gravé 1 +ma 1 +mémoire 1 +comme 1 +mon 1 +Un 1 +grand 1 +Merci 1 +avoir 1 +fait 1 +moi 1 +rêveur 1 +pragmatique 1 +ausgefeilten 1 +Ansammlung 1 +Grenzkontrolle 1 +Passage 1 +MigrantInnen 1 +selektieren 1 +filtern 1 +Meeresfrüchte 1 +flambierte 1 +praxisnahen 1 +Artenschutz 1 +Verkehrswegen 1 +Leitgedanken 1 +organisatorischen 1 +(Wenn 1 +regeneriert 1 +80-iger 1 +90-iger 1 +Kellereien 1 +schliesslich 1 +erhoffte 1 +Herkunftsprädikat 1 +Binissalem 1 +vorantreibt 1 +Gerichtsverfahren 1 +Leyla 1 +Zana 1 +Poliester 1 +materialen 1 +Automateriale 1 +Luksus 1 +Ikw 1 +Zisternen 1 +Schwimmerbader 1 +etc. 1 +Verkuf 1 +Ex-Jugoslavien 1 +Gestalt 1 +Führerscheins 1 +verkehrt 1 +Geisterstunde 1 +Nous 1 +venons 1 +apprendre 1 +ce 1 +matin 1 +décès 1 +Michel 1 +PRONIER 1 +F5ODS 1 +survenu 1 +fin 1 +journée 1 +autoritär 1 +Fußbälle 1 +Kappen 1 +Vermögenswerte 1 +ausgetrocknet 1 +unterstreichen 1 +bisherige 1 +Tanzt 1 +Drachen 1 +Besprechungsräume 1 +grundsätzlich 1 +wettbewerbsneutral 1 +Normalsteuersätze 1 +Abschaffung 1 +Einfuhrzölle 1 +Reis 1 +Bananen 1 +Zucker 1 +standardisierten 1 +wettbewerbsfähigen 1 +vor: 1 +Technology 1 +entwirft 1 +baut 1 +Sicherheitsstufe 1 +Gefährlich 1 +Router 1 +Fallback-Funktion 1 +DSL-Verbindungsausfall 1 +Einwahl 1 +ISDN) 1 +Faxdienste 1 +Frustration 1 +Unfähigkeit 1 +herrührt 1 +Eisenbahngesellschaften 1 +Hauptbereichen 1 +auszurichten 1 +Naturkatastrophen 1 +deutliches 1 +Alarmsignal 1 +konsequentere 1 +pulsierenden 1 +Javier 1 +Solana 1 +MdEP 1 +Tony 1 +Blair 1 +starke 1 +Sieger 1 +einziger 1 +Ring 1 +übrig 1 +Einwicklungsländer 1 +Junktim 1 +Bildungsniveau 1 +wirtschaftlichem 1 +trügerisch 1 +Erneuerung 1 +Staatsbetrieben 1 +erschien 1 +app 1 +Video 1 +Hansafilm 1 +UG 1 +Linksetzung 1 +verlinkte 1 +Katechismus 1 +LUMIX 1 +DMC-FX150 1 +Kompaktkamera 1 +Bequemlichkeit 1 +VERTU 1 +Signature 1 +Cobra 1 +Replika 1 +ungeheuerlichen 1 +310.000 1 +300-mal 1 +billiger 1 +gerichtlichen 1 +ernten 1 +beobachtende 1 +Instanz 1 +Welthandelsorganisation 1 +Suchoption 1 +Ankunfts- 1 +Abreisedatum 1 +gewünschten 1 +haken 1 +verfügbare 1 +eindrucksvolle 1 +Statistik 1 +Frustrationen 1 +Almosen 1 +Tags: 1 +Nvu 1 +Tsimberg 1 +Visconti 1 +moralischen 1 +Beitrittsstatus 1 +Alkoholgehalt 1 +Lagerzeiten 1 +beträchtlich 1 +garantierte 1 +Mindestfrischedatum 1 +Hopfingerbräu 1 +26.05.2006 1 +Brauhaus 1 +Lindenbräu 1 +Sony 1 +Potsdamer 1 +falsche 1 +Hoffnungen 1 +unverfrorener 1 +attackieren 1 +naiv 1 +abzusegnen 1 +Schlüsselelemente 1 +Ziels 1 +EU-Patentes 1 +Unterfangen 
1 +hinzieht 1 +Gesetzgebung 1 +geeigneteren 1 +Zeitpunkten 1 +Plenarwoche 1 +Riss 1 +EU-weit 1 +68 1 +Augenmerk 1 +benachteiligter 1 +Grundsätze 1 +einzuschlagen 1 +Péry 1 +anstehen 1 +Gaddafi 1 +Technokraten 1 +Homestays 1 +Jugendlichen 1 +Gastfamilien 1 +Einblicke 1 +jeweilige 1 +IND 1 +DEM-Fraktion 1 +Letzte 1 +Jubel 1 +Skies 1 +handelsbezogenen 1 +Auszahlungen 1 +CKFX 1 +reibungslos 1 +Verbessern 1 +Arrangementpreis 1 +Gepäckstück 1 +(max 1 +kg) 1 +nützlichen 1 +Vision 1 +Bündnis 1 +kennen 1 +Colors 1 +Of 1 +Kniffe 1 +Tricks 1 +lässigen 1 +Gitarrenakkorden 1 +Gassenhauer 1 +zimmern 1 +Servicemarken 1 +Markennamen 1 +Besitzer 1 +Ferber 1 +schwebt 1 +gearteten 1 +zentralistischen 1 +Organismus 1 +zurückzukehren 1 +Zyklus 1 +Kompositionen 1 +Mendelssohns 1 +Partition 1 +Festplatte 1 +höchstwahrscheinlich 1 +Präventivschlag 1 +diplomatischen 1 +vereinbarten 1 +Marsch 1 +Bombenbesitz 1 +Erhalt 1 +Anbaus 1 +Schalenfrüchte 1 +differenzierte 1 +GMO 1 +Gemüse 1 +verärgert 1 +Tabakgiganten 1 +zuzugestehen 1 +Krebs 1 +Gelder 1 +Tabakanbauer 1 +trug 1 +Jesajas 1 +hatte; 1 +vermischt 1 +zerstreut 1 +Erdgeschossebene 1 +Vorrichtungen 1 +fà1 +¼ 1 +r 1 +Monoraillifter 1 +Fragestunde 1 +(B5-0163 1 +2001). 1 +Achten 1 +Leerraum 1 +Absätzen 1 +subregionalen 1 +Bewirtschaftung 1 +sinnvolles 1 +staatliches 1 +Handeln 1 +Fischereibereich 1 +Setup 1 +Benutzerhandbuch 1 +Beispielprogramme 1 +Pian 1 +Novello 1 +Hotel-ALPINE 1 +Gaststätte 1 +Geschmacks 1 +sapori 1 +Begonnen 1 +Originaltextes 1 +Grill-Restaurant-gemütliche 1 +parlamentarische 1 +ETA 1 +entsagt 1 +entsagen 1 +Geregelt 1 +Bälde 1 +voneinander 1 +offensichtlichen 1 +Verteilungssysteme 1 +Heinz-Harald 1 +Frentzen 1 +Aussagekraft 1 +Rundenzeiten 1 +Vorsaison 1 +Weitsicht 1 +zukommen 1 +70 1 +LOUIS 1 +XIV 1 +MAZARIN 1 +dreißig 1 +Vorauskasse 1 +gewünschte 1 +Lieferadresse 1 +Geschenk 1 +Überraschung 1 +Kluft 1 +überbrücken 1 +Computerviren 1 +Software-Tools 1 +Reserve 1 +1921 1 +1951 1 +Serengeti 1 +National 1 +Tansania 1 +14% 1 +Pro-Kopf-Anteil 1 +Monster 1 +Spielzeug 1 +sprengt 1 +Arschfotze 1 +verkraftet 1 +supergeil 1 +angespannten 1 +Budgeterhöhungen 1 +geradezu 1 +Unverschämtheit 1 +kenianische 1 +Richtern 1 +Commonwealth 1 +durchgeführte 1 +starkes 1 +bürokratisch 1 +Giants 1 +begraben 1 +vernichtet 1 +versteckt 1 +E-Mailadresse 1 +WYSIWYG 1 +(wie 1 +Gästebuch) 1 +lcd 1 +Einsicht 1 +verschlossenen 1 +Strg-C 1 +anschließend 1 +Principe 1 +Fitalia 1 +renovierten 1 +herrschaftlichen 1 +Moldau 1 +instabilen 1 +Zone 1 +Gestatten 1 +Anbaupflanze 1 +anführe: 1 +Pkw 1 +Verena 1 +(22) 1 +Heels 1 +Ballerinas 1 +Riemchen 1 +Einzelperson 1 +Symbol 1 +Ausrufung 1 +Fassadenkonstruktion 1 +fischer-Ankern 1 +-Injektionsmörtel 1 +befestigt 1 +enge 1 +Terminplan 1 +Skigebiet 1 +Gstaad 1 +Rougemont 1 +Servern 1 +aufteilt 1 +erm 1 +glicht 1 +skalierbare 1 +Datenanalysen 1 +befürchte 1 +Branche 1 +ehe 1 +täte 1 +1.7 1 +31.8 1 +Application 1 +Download-Center 1 +Consulting 1 +Vila 1 +Real 1 +Chaves 1 +Portugals 1 +Messing 1 +Eisen 1 +Aluminium 1 +Bronze 1 +Speziallegierungen 1 +führe 1 +Hauptkomponentenanalyse 1 +(HKA) 1 +XLSTAT 1 +Unbewußtheit 1 +hypnotisierten 1 +Probanden 1 +erklärte 1 +Hemmung 1 +Cortex 1 +Suggestionen 1 +angeregten 1 +Zentren 1 +Automatismen 1 +hervorgerufen 1 +Heidenhain 1 +Phänomene 1 +Hypnose 1 +vage 1 +Meinungsäußerung 1 +beförderten 1 +Automatismus 1 +BürgerInnenrecht 1 +Austeilen 1 +Riverkarte 1 +Wetten 1 +Runde 1 +Internetauftritt 1 +Karrierechancen 1 +Evonik 1 +Saar 1 +Anbindung 1 +Nahverkehrsnetz 1 +Messegelände 1 +Golfclub 1 
+Naturparks 1 +preiswerten 1 +niemals 1 +Erweiterungen 1 +wettbewerbsverzerrenden 1 +Andererseits 1 +infiziertem 1 +Truthahnfleisch 1 +Lebensmittelprodukte 1 +Ursprungskennzeichnung 1 +zulässig 1 +(Service 1 +Operation) 1 +Weiterleiten 1 +Incident 1 +Change 1 +technisches 1 +Erfahrungsschatz 1 +Bieten 1 +1993 1 +zugestanden 1 +Hennegau 1 +Ziel-1-Gebiet 1 +berechtigt 1 +Reisebeginn 1 +zurückzutreten 1 +Regulierungsbehörden 1 +Ratingagenturen 1 +beaufsichtigen 1 +Zwangsmaßnahmen 1 +Selbstfahrer 1 +Mopedfahrer 1 +Tragen 1 +Sturzhelmes 1 +vorgeschrieben 1 +Anstatt 1 +großzügiger 1 +Steuerbehörden 1 +erfasster 1 +Diäten 1 +Bezügen 1 +strikt 1 +unterschieden 1 +Gerät 1 +kauft 1 +Risercard 1 +bestückt 1 +icefest 1 +-Tagsuche 1 +gelangte 1 +Ugandas 1 +Kalibbala 1 +kennenlernte 1 +Not 1 +abbrechen 1 +Tongo 1 +Supermacht 1 +enormen 1 +Öl- 1 +Gasreserven 1 +Energiesupermacht 1 +Putin 1 +Ehebett 1 +Wohnraum 1 +Sofa 1 +Mandat 1 +Gesandten 1 +Moratinos 1 +Standardbrowser 1 +URL 1 +Schmerzgrenze 1 +Tschechien 1 +zusammengehalten 1 +Ob 1 +Amerikaner 1 +Limousinen- 1 +Autovermietung 1 +Kürzungen 1 +Kommunalbehörden 1 +Gemeinderäte 1 +Abfallsammlung 1 +Herstellern 1 +Beitragsrabatt 1 +fünfeinhalb 1 +(wenn 1 +KStars 1 +hergestellt). 1 +Geisterfahrer 1 +ause 1 +inander 1 +Sprachrohr 1 +Lungenkrebspatienten 1 +Globale 1 +Lungenkrebskoalition 1 +verbesserten 1 +Krankheitsverlauf 1 +potenziellen 1 +Wasserverschmutzung 1 +grenzübergreifende 1 +europaweiten 1 +Rechtsetzung 1 +Regenwälder 1 +schützen; 1 +Schlüsselthema 1 +Wälder 1 +20% 1 +Treibhausgasemissionen 1 +ausgerichtet 1 +anzuleiten 1 +häuslichen 1 +qualitatives 1 +ABA 1 +VB-Programm 1 +Bundesstaat 1 +Bahia 1 +originalen 1 +Boulder 1 +Dash 1 +ausgegraben 1 +unverzügliche 1 +Milosevic 1 +Rugova 1 +Bildungswesen 1 +Präsidentenwahl 1 +Spekulation 1 +Dennis 1 +Meadows 1 +Mitautor 1 +wegweisenden 1 +Buches 1 +1972 1 +Wirtschaftswachstum 1 +Wortschatz 1 +Einzug 1 +Freuen 1 +Mischung 1 +Tradition 1 +Lege 1 +Mechanische 1 +Streitroß 1 +Eurokrise 1 +Registrierungsprozedur 1 +erledigt 1 +e-mail 1 +Registrierungsschlüssel 1 +entschlossen 1 +ENISA 1 +baldmöglichst 1 +Massagekabinen 1 +türkisches 1 +Vichy 1 +dynamisches 1 +Schwimmbecken 1 +Jacuzzi 1 +Wanne 1 +Wassermassage 1 +Kreisdusche 1 +Alegnbad 1 +Schönheitsbehandlungen 1 +Se 1 +vende 1 +lote 1 +64000 1 +mt2 1 +Vereda 1 +Zarzal 1 +Municipio 1 +Copacabana 1 +(Antioquia) 1 +construir 1 +finca 1 +weitverbreitetes 1 +gelagert 1 +Gleichsetzung 1 +Homosexualität 1 +physischer 1 +Nekrophilie 1 +moralisch 1 +abstoßend 1 +zutiefst 1 +verachtenswert 1 +anpackt 1 +Duplex 1 +Schlafzimmern 1 +Schlafsofa 1 +Zubehör 1 +Abstellraum 1 +nehme 1 +Jackson 1 +gebrauchen 1 +nämlich: 1 +dasselbe 1 +denselben 1 +optische 1 +Individualisierungen 1 +Speaking 1 +with 1 +Julio 1 +verschwinden 1 +umgebracht 1 +Frechheit 1 +emphatischen 1 +vorkommen 1 +Technologieführer 1 +826 1 +7580 1 +optionale 1 +Multipanel-Ansicht 1 +integrierten 1 +bedienenden 1 +Benutzeroberfläche 1 +Seearbeitsübereinkommen 1 +Arbeitsorganisation 1 +weitreichendes 1 +Präferenzregelungen 1 +bilaterale 1 +Streitfällen 1 +Handelspartner 1 +Kontext 1 +Gaskrise 1 +Feststellungen 1 +drop-down 1 +Tischlimits 1 +begrüßt 1 +Haushaltsjahr 1 +gründliche 1 +Bewertung 1 +Überwachungs- 1 +Gerichtshofes 1 +wiegenden 1 +Rechtsverletzung 1 +Swiebel 1 +verwehrt 1 +Verfahrensdauer 1 +Strukturfondspolitik 1 +frische 1 +Handtücher 1 +Pomés 1 +Ruiz 1 +FastViewer 1 +KG 1 +branchenunabhängige 1 +3000 1 +Würde 1 +ArbeiterInnen 1 +Solidaritätsstreiks 1 +Zeitspanne 1 +Durchführungsbestimmungen 1 
+Autoconf 1 +2.13 1 +benötigt 1 +Debian-Pakete 1 +enthielten 1 +Build-Abhängigkeit 1 +Heimat 1 +architektonische 1 +Leipziger 1 +Experimentale 1 +Lebensmittelbehörde 1 +Vorschlägen 1 +Mix-Wafern 1 +Shrimps-Paste 1 +Wasserkastanien 1 +Ingwer 1 +Eigelb 1 +Schüssel 1 +2011-Haushalts 1 +breite 1 +gerichtete 1 +Implantate 1 +Modell 1 +Bundesregierung 1 +nachvollzogen 1 +Springsteen 1 +zweierlei 1 +Weise: 1 +Etikett 1 +verwies 1 +(Teil 1 +kommentiertes 1 +unten). 1 +leistungsorientiert 1 +Strandhotel 1 +Avantgarde-Stil 1 +285 1 +Stockwerke 1 +Swasiland 1 +Matsapha 1 +Airport 1 +circa 1 +Manzini 1 +Kranke 1 +ländlichen 1 +afrikanischen 1 +Madagaskar 1 +versorgen 1 +Kapitän 1 +Leiter 1 +zusammengetan 1 +Kritisiert 1 +Äußerungen 1 +Mangelbegriff 1 +miteinbezogen 1 +31040 1 +Campo 1 +Pietra 1 +Salgareda 1 +(TV) 1 +obengenannte 1 +verdrehen 1 +exekutierbar 1 +akzeptabel 1 +wohlüberlegt 1 +ausgeschrieben 1 +65 1 +prägt 1 +Irene 1 +Spritzgießmaschinenhersteller 1 +ENGEL 1 +D-Box 1 +INTRO-Nasenleiste 1 +Rovingbelegung 1 +unterbreitete 1 +einzufrieren 1 +Endes 1 +reicheren 1 +aufbürden 1 +Fragestellerin 1 +Währungspolitik 1 +Wirtschaftsund 1 +Währungsbehörde 1 +105 1 +garantierter 1 +weitergehende 1 +Landerwerb 1 +rechtmäßiges 1 +Erwerb 1 +Ackerland 1 +Redner 1 +kompetent 1 +Schattensprache 1 +kontinuierlich 1 +Gesprächspartner 1 +argumentiert 1 +Komplexität 1 +sorgfältige 1 +Sonderkontingente 1 +Alternativen 1 +Kneipe 1 +vorf 1 +hrt 1 +Programmierer 1 +gefragt 1 +bettelnden 1 +erh 1 +rt 1 +Fettleibige 1 +womit 1 +verdoppelt 1 +kennt 1 +Dale 1 +Conde 1 +Saro 1 +Skytte 1 +Christoffersen 1 +technologisch 1 +gestützte 1 +Verkehrssicherheit 1 +Superbox 1 +attraktive 1 +herangetraut 1 +überrascht 1 +originelle 1 +interaktive 1 +Inhalte 1 +EVP-Fraktion 1 +mittelständische 1 +präziser 1 +Konzepte 1 +Binnenmarktes 1 +Best 1 +Western 1 +Shinjuku 1 +Astina 1 +Tokyo 1 +verpflichte 1 +meinerseits 1 +Sevilla 1 +Schlussbilanz 1 +Halbjahr 1 +offenem 1 +Privatterasse 1 +zuständige 1 +dreißigminütigen 1 +beschönigen 1 +Zorn 1 +zunichte 1 +Kulturagenda 1 +stärkeren 1 +Rechtsrahmen 1 +Verteilen 1 +Geldes 1 +merkwürdigen 1 +Aufgabenverteilungen 1 +abzeichnen 1 +Schlimmer 1 +noch: 1 +Innovationswachstum 1 +(SV) 1 +befürwortete 1 +Mitgliedsländer 1 +widmete 1 +fünfzehn 1 +Zen 1 +Französischen 1 +zögern 1 +feststellen 1 +versetzen 1 +Landbewohnerquartier 1 +rustikale 1 +Berghütte 1 +rundem 1 +St-Félix-de-Valois 1 +Lanaudière 1 +Österreicherin 1 +rascher 1 +voranzugehen 1 +vergewissern 1 +berichten 1 +Übergriffen 1 +Andersdenkende 1 +Erich 1 +Ludendorff 1 +1865 1 +Kruszczewnia 1 +Posen; 1 +1937 1 +Tutzing) 1 +Paul 1 +Hindenburg 1 +Heeresleitung 1 +(OHL) 1 +Weltkriegs 1 +Map 1 +Linien 1 +(Polygone) 1 +kartieren 1 +Fahrradrouten 1 +überlegen 1 +institutionellen 1 +stoßen 1 +Blauen 1 +Moschee 1 +Hagia 1 +Sophia 1 +byzantinischen 1 +verehrten 1 +Berliner 1 +Westeuropa 1 +DMS 1 +Webnologic-Produkten 1 +cityguide-dms.com 1 +Eröffnungsevent 1 +Einweihung 1 +Gleich 1 +öffneten 1 +Technologie-Tage 1 +Pforten 1 +3.000 1 +Fachbesuchern 1 +neuste 1 +Kundencenter 1 +Ausnahme 1 +Rechnungen 1 +vollzeitlich 1 +arbeite 1 +ausreichenden 1 +Vielmehr 1 +Feste 1 +umständehalber 1 +Naturveranlagte 1 +erfahrener 1 +Meister 1 +Unterwerfung 1 +Demütigung 1 +extreme 1 +dezidierte 1 +Hinwendung 1 +Kinoarbeit 1 +Gaudlitz 1 +Cineastenkreisen 1 +Kultstatus 1 +Standbeleuchtung 1 +Hiermit 1 +rufe 1 +sozialdemokratischen 1 +liberalen 1 +überdenken 1 +systematisch 1 +belogen 1 +Verknüpfung 1 +niederschreiben 1 +Element 1 +Führungsqualität 1 +bindend 1 +bloßen 1 
+keines 1 +Spaziergängen 1 +Erklimmen 1 +259 1 +Limarò-Canyon 1 +3174 1 +Tosamassivs 1 +1682-84 1 +eindrucksvollen 1 +Turmhelm 1 +zurückhaltende 1 +unverdorbenen 1 +Ländlichkeit 1 +Sugano 1 +Bann 1 +(DA)... 1 +Letzt 1 +Dan 1 +Jørgensen: 1 +Mexiko 1 +Leistungsreserve 1 +Skepsis 1 +Durchführbarkeit 1 +ausräumen 1 +Gesundheitsgefährdungen 1 +entgegenzutreten 1 +Vernetzung 1 +Felder 1 +Rückrufnummer 1 +inakzeptabel 1 +bekräftigen 1 +anhängigen 1 +Besitzstand 1 +Finanzrahmen 1 +Einzelzimmer 1 +27,00 1 +45,00 1 +EF-111A 1 +Raven 1 +umgebaut 1 +frühen 1 +1990er 1 +allmähliche 1 +Einmottung 1 +Maintenance 1 +Regeneration 1 +(AMARC). 1 +Daneben 1 +Aktie 1 +erstmalig 1 +formalen 1 +Börsenhandel 1 +Quotation 1 +550 1 +überreichen 1 +Promessa 1 +Foundation.org 1 +etabliert 1 +www.promessa.se 1 +Europaabgeordneter 1 +Flut 1 +Zuschriften 1 +ausgelöst 1 +release 1 +branch) 1 +Versionszweig 1 +Zweig 1 +Versionsverwaltungssystem 1 +(siehe 1 +Branch 1 +]), 1 +Release 1 +Hauptentwicklung 1 +getrennt 1 +rege 1 +Wanderungen 1 +Praia 1 +Luz 1 +Landschaftsgarten 1 +gelernt 1 +reformgewillten 1 +Atempause 1 +gönnen 1 +Barceloneta 1 +S.L. 1 +speichert 1 +E-mail 1 +Passnummern 1 +Geschlecht 1 +Kontaktdaten 1 +Servicepersonal 1 +Bootsfahrten 1 +Skigebiete 1 +(gegen 1 +Gebühr). 1 +Endlich 1 +Deflation 1 +Rücken 1 +gefesselt 1 +Finanzminister 1 +Schäuble 1 +anscheinend 1 +Tür 1 +Integrationsrunde 1 +Finanzunion 1 +Mindestens 1 +Gefangenschaft 1 +umgekommen 1 +Berufsgruppen 1 +Graveure 1 +Flexografen 1 +Habitus 1 +restlos 1 +aufgesogen 1 +Berufen 1 +offenkundigsten 1 +Demokratiedefizit 1 +Tudor 1 +Zielsetzung 1 +Hauptzufahrtsstraßen 1 +DU 1 +kniest 1 +Stärke 1 +Rückgrats 1 +geäußert 1 +Gegenstimme 1 +Moreau 1 +-Spalten 1 +Aufzählungswert 1 +Künftig 1 +Vertragsstaat 1 +kontinuierlichen 1 +Überwachungsprozess 1 +Aufrechterhaltung 1 +Fischereitätigkeit 1 +handfeste 1 +Gestern 1 +60. 
1 +fiele 1 +argumentieren 1 +Fernsehübertragung 1 +Joshua 1 +Nachmittag 1 +(Samstag) 1 +Palafiori 1 +Corso 1 +Garibaldi 1 +Sanremo 1 +Primo 1 +Prix 1 +gelato 1 +artigianale 1 +Jesaja-Rolle 1 +1QIsa 1 +Disziplin 1 +1QS 1 +Habakuk-Kommentar 1 +1QpHab 1 +vollständigsten 1 +Schriftrollen 1 +Tausenden 1 +Fragmenten 1 +Qumran 1 +Meers 1 +Jahrzehnten 1 +Kraftstoffleitungen 1 +u.a 1 +Direkt-einspritzung 1 +flüssigem 1 +konstantem 1 +Maximaldruck 1 +auseinandersetzen 1 +gemeinschaftlicher 1 +EIB 1 +Darlehensgarantien 1 +Einrichten 1 +Druckers 1 +konfigurieren 1 +(keine 1 +Windowsfreigabe) 1 +Drucker 1 +aufzunehmen 1 +Kontakt-Seite 1 +heimliche 1 +Wende 1 +Südkaukasus 1 +weiterzuentwickeln 1 +gediegenen 1 +Sandstein 1 +Vorfälle 1 +Zeitsind 1 +ermutigend 1 +Visionen 1 +KULTURHAUPTSTADT 1 +DES 1 +FÜHRERS 1 +kulturpolitischen 1 +1938 1 +Kontinuitäten 1 +Chisinau 1 +ertrinken 1 +oberster 1 +japanischer 1 +Küchenchef 1 +Sushimeister 1 +Fujii-san 1 +zaubert 1 +Makis 1 +Nigiris 1 +fangfrischem 1 +Tresen 1 +loggen 1 +Verwaltungsfunktionen 1 +zuzugreifen 1 +Geusthouse 1 +Hilton 1 +verklagte 1 +Salomon 1 +nachträglich 1 +zugesprochen 1 +Corsendonk 1 +Viane 1 +Apartments 1 +Turnhout 1 +Absolventen 1 +tierchiropraktischen 1 +Behandlungstechniken 1 +fachkundig 1 +beherrschen 1 +IAVC-Kollegen 1 +einziehbarer 1 +Bankscheck 1 +5% 1 +Aufenthaltwerts 1 +Patenkindes 1 +Mensch 1 +allerbesten 1 +Gefallen 1 +Verehrung 1 +Lobs 1 +zweiachsige 1 +Verbrennungstriebwagen 1 +Dos-à-dos 1 +-Längssitzbänken 1 +Europeana 1 +Katalog 1 +digitalisierten 1 +Werken 1 +Gesetzgeber 1 +grundlegender 1 +Ausgleich 1 +Entwicklungshilfe 1 +Miskole 1 +Bratislava 1 +ad 1 +hoc 1 +Fügen 1 +Brief 1 +hinzu: 1 +Diejenigen 1 +asymmetrischen 1 +Lenin 1 +products 1 +tools 1 +Geschlechter 1 +Arbeitsleben 1 +zurückkehren 1 +Quotensystem 1 +Klamt 1 +Allgäu-Tirol-Familien-Ski-Card 1 +81 1 +Liftanlagen 1 +Vitalen 1 +Liftkassa 1 +Reuttener 1 +Seilbahnen 1 +Erkunde 1 +Helicopter 1 +Bedauerlicherweise 1 +verwässert 1 +aktuellsten 1 +CAD-Programmen 1 +CAM-Herstellung 1 +Qualitätsniveau 1 +Testern 1 +Authentifizierungsschlüssel 1 +geschaffene 1 +schrift-exklusiven 1 +Umgebungen 1 +nachgewiesen 1 +Einberufung 1 +Grundrechtscharta 1 +Skiorte 1 +Pyrenäen 1 +ausgeliehen 1 +HESS 1 +kompetente 1 +ehrwürdigsten 1 +Hochschulen 1 +1364 1 +gegründete 1 +Jagiellonen-Universität 1 +PDF- 1 +XPS- 1 +DOC- 1 +XLS- 1 +PPT-Dateien 1 +gDoc 1 +stärkerer 1 +Fahrpreise 1 +LangstreckenBusfahrten 1 +allenthalben 1 +Ideologien 1 +ausgerufen 1 +Scheinwerfer 1 +Aluminiumprofilen 1 +zusammengesetzt 1 +Temperaturmessung 1 +vorwiegend 1 +PT100 1 +Umsatzsteuer 1 +(0.00%) 1 +Umsatzsteuernummer 1 +Gesetzes- 1 +Rechtslage 1 +Livechat 1 +damaligen 1 +Softlab 1 +Bernd 1 +Stroppel 1 +T-Systems-Einheit 1 +Finanz- 1 +Rechnungswesen 1 +Controlling 1 +Einkauf 1 +vereinfachen 1 +Kategorien 1 +aufgeteilt 1 +Miami 1 +River 1 +liefert 1 +Everglades 1 +fließt 1 +Atlantik 1 +baltischen 1 +unterwerfen 1 +AM-111 1 +Hörverlust 1 +verletztem 1 +Bogengang 1 +Mittelohrentzündung 1 +HNO-Klinik 1 +Universität 1 +Florida 1 +Gainesville 1 +Dekor 1 +konstruktives 1 +Fortsetzung 1 +Flüssig- 1 +Trockenei 1 +importieren 1 +umstrittenes 1 +Atomenergie 1 +raten 1 +Jahresabschlüsse 1 +anbringen 1 +Designer 1 +Laufsteg 1 +Lacroix 1 +Dior 1 +Armani 1 +US-Repräsentantenhaus 1 +District 1 +1970 1 +stimmberechtigten 1 +US-Senat 1 +Besonderheit 1 +mitwählen 1 +geänderten 1 +SAVE 1 +ALTENER 1 +umweltfreundlichen 1 +anregen 1 +voranzubringen 1 +Incotec 1 +maßgeschneiderte 1 +Eurobonds 1 +attraktiv 1 +streiten 1 +Kraftwerken 1 
+Ausnahmeregelung 1 +NH 1 +Schiller 1 +40: 1 +43: 1 +Anschauen 1 +Höchstmengen 1 +Rückstände 1 +Atomwirtschaft 1 +Onlinedebatte 1 +Chemie 1 +Klimaschutz 1 +Granada 1 +Fuss 1 +entdecken 1 +Makler 1 +überwiegend 1 +Menschenrechtspolitik 1 +gegenübersteht 1 +Mitspieler 1 +Nanclus 1 +Darryl 1 +Henriques 1 +Gesundheits- 1 +Bildungssystem 1 +Lebensstandard 1 +verschlechtert 1 +83 1 +Dumbrava 1 +schönsten 1 +Hermannstadt 1 +EZColor 1 +Farbprofile 1 +Scanner 1 +SilverFast 1 +unterlegen 1 +Harald 1 +Kastlunger 1 +Gigant 1 +Phantasie 1 +künstlerischen 1 +Schaffen 1 +Lebenshaltung 1 +Grünbuch 1 +handelspolitischer 1 +Schutzinstrumente 1 +Ausgleich- 1 +Antidumpingmaßnahmen 1 +Schutzklauseln 1 +vielerlei 1 +Arándano 1 +Madrid 1 +gemütliche 1 +moderner 1 +Designereinrichtung 1 +saftigen 1 +Öl 1 +Schmutz 1 +Reibungswiderstand 1 +Eisenbahnen 1 +Neigungen 1 +Promillebereich 1 +Zahnstangenbetrieb 1 +abgesehen 1 +Profitieren 1 +Schönheits- 1 +Wellnessbereich 1 +Romantikhotels 1 +Partnerhotel 1 +Fischerwirt 1 +gegenüberliegenden 1 +(Nutzung 1 +Aufpreis). 1 +Ähnlich 1 +Restatements 1 +American 1 +Law 1 +gesetzesähnlichen 1 +Versicherungsvertragsrechts 1 +Fortbewegungsmöglichkeiten 1 +aufgreift 1 +unnötige 1 +Laststrom 1 +Motorwicklung 1 +Temperaturen 1 +Fair 1 +Value 1 +voraussichtliche 1 +Motors 1 +Gruppen- 1 +Seminarpreise 1 +personalisiertes 1 +maßgeschneidertes 1 +unterbreiten 1 +Alexandra 1 +Rousset 1 +Rennens 1 +do 1 +Palacio 1 +Ines 1 +Blumenmarkt 1 +kultivierten 1 +Witterung 1 +geschützten 1 +Ladeschleusen 1 +Ladebedingungen 1 +An- 1 +Auslieferung 1 +Reparatur 1 +Bussen 1 +Autosan 1 +H9-21 1 +H10 1 +Beispielen 1 +Individuum 1 +Spezies 1 +(Gattung 1 +Fall) 1 +regelmäßiges 1 +Backup 1 +versteht 1 +Ärzte 1 +gewöhnt 1 +Einwilligung 1 +vorsätzlichen 1 +Tötungen 1 +gedrängten 1 +hofft 1 +reisen 1 +Augenblick 1 +verbindliche 1 +seht 1 +heissesten 1 +dreckigsten 1 +MILF 1 +k 1 +nnte 1 +reifen 1 +Ladies 1 +geil 1 +hemmungslose 1 +Action 1 +bescheren 1 +Euren 1 +Bildschirmen 1 +dahinschmelzen 1 +Bonn 1 +53113 1 +Stockenstr 1 +Heusstadl 1 +verfügung 1 +Benennen 1 +xCubase 1 +"), 1 +Sequenzer 1 +Chinas 1 +anstatt 1 +Stärken 1 +ableitet 1 +Doppelmoral 1 +lenkt 1 +Aussteigen 1 +hinteren 1 +Busses 1 +Schloss-Keno-Tippschein 1 +ersucht 1 +Fuengirola 1 +grossteil 1 +spanisch 1 +Unterhaltungsangebot 1 +ausländischer 1 +verehrter 1 +redete 1 +Kein 1 +Airpass 1 +enden 1 +auszusprechen 1 +strafbare 1 +535 1 +Abraha 1 +aksumitischen 1 +Sklaven 1 +gestürzt 1 +Werben 1 +KollegInnen 1 +eTwinning 1 +finanzierte 1 +Aus- 1 +übertragbarer 1 +gelangt 1 +gemeinschaftlichen 1 +Organen 1 +Pentru 1 +juca 1 +Germana 1 +Portugheza 1 +gratuit 1 +sau 1 +mai 1 +multe 1 +casute 1 +aflate 1 +langa 1 +numele 1 +lectiilor 1 +tipul 1 +apasati 1 +butonul 1 +Incepe 1 +PatchManager 1 +relationalen 1 +verwaltet 1 +Pervenche 1 +Berès 1 +Recht: 1 +Investmentfonds-Branche 1 +potenziell 1 +katastrophalen 1 +Regionale 1 +befugt 1 +rundheraus 1 +abgewiesen 1 +anschließt 1 +Augusto 1 +Pinochet 1 +10. 
1 +liefen 1 +Monti 1 +zurückgelegt 1 +Zehn 1 +ökonomischen 1 +Nachhaltigen 1 +Wirtschaftens 1 +Schweigeminute 1 +Jason 1 +Quinn 1 +einzulegen 1 +nacht 1 +schrecklichen 1 +ums 1 +Familienurlaub 1 +Tarsch 1 +Südtirol 1 +Bezeichnen 1 +220m 1 +Strecke 1 +Holzschnitzel 1 +Kiesel 1 +Lehm 1 +barfuss 1 +Härzlisee 1 +bewirteten 1 +Brunnihütte 1 +Konstruktion 1 +Ausklappen 1 +Fussstütze 1 +Rückenlehne 1 +hinten 1 +Hüttenausstattung: 1 +Kaltes 1 +warmes 1 +Kochplatte 1 +Küchenausstattung 1 +KabelTv 1 +Bettdecken 1 +Kissen 1 +Graefe 1 +Baringdorf 1 +unverantwortlich 1 +Verdünnungsfaktor 1 +Schwarze 1 +Crosswochenende 1 +Nordküste 1 +flach 1 +Quellfeld 1 +schillernde 1 +visuelle 1 +Umweltverschmutzung 1 +Publicity 1 +vergewaltigt 1 +optimal 1 +Disneyland 1 +Buffet-Essen 1 +überraschend 1 +Gegenwart; 1 +Letztendlich 1 +Vorhandensein 1 +betreffen 1 +funktionierendes 1 +Genehmigungssystem 1 +Angebot: 1 +empfehlen: 1 +Bestreben 1 +Senioren 1 +niedrigen 1 +Renten 1 +unsicheren 1 +bezahlten 1 +Wesentlichste 1 +Kleines 1 +Antonio 1 +Gramsci 1 +Briefen 1 +Völkern 1 +entgegengehen 1 +Lernbedürfnisse 1 +unterschiedlichster 1 +Kundengruppen 1 +Kindergärten 1 +Krankenhäusern 1 +Sicherheitseinheiten 1 +Getränkeautomaten 1 +Hotel- 1 +Gaststättengewerbe 1 +Hübner 1 +Kohäsionsbericht 1 +bunt 1 +Sportmedien 1 +Auge 1 +atemberaubend 1 +Kontinents 1 +schärfen 1 +Register 1 +blog 1 +Ablenkung 1 +pneumatischen 1 +Spritzrohr 1 +langsamer 1 +Revisionen 1 +CVS 1 +Sobald 1 +Email 1 +festgelegten 1 +Netzwerkverbindung 1 +PSP 1 +-Systems 1 +manchen 1 +Reprojektion 1 +groß 1 +dauerhaft 1 +vorhersehbar 1 +wärmeren 1 +Jahrhunderten 1 +effektivste 1 +Problemstellung 1 +Tools 1 +angepaßt 1 +Updates 1 +Genauso 1 +aufwenden 1 +staunen 1 +erdenklichen 1 +Digitalfotografie 1 +Fünffachzoom 1 +Schärfegarantie 1 +satte 1 +postertaugliche 1 +Auflösung 1 +hochwertiges 1 +markantem 1 +S1060 1 +Schmuckkästchen 1 +Wundertüte 1 +fixiert 1 +Mittelmeers 1 +Freihandelsabkommen 1 +paradoxerweise 1 +liberalisieren 1 +gemeinwohlorientierter 1 +Vorortverkehre 1 +(Commuter 1 +Trains) 1 +Erie 1 +Lackawanna 1 +(EL), 1 +Pennsylvania-Reading 1 +Seashore 1 +Lines 1 +(PRSL) 1 +Penn 1 +(PC) 1 +bezuschusst 1 +Privacy: 1 +personenbezogene 1 +größtmöglicher 1 +Vertraulichkeit 1 +neunziger 1 +Intellektuelle 1 +ständigem 1 +Angriffen 1 +12.00 1 +gemeldet 1 +anhaltende 1 +schlimme 1 +West-Papua 1 +Regierungsprozess 1 +einbeziehen 1 +EU-Statistik 1 +normalen 1 +Vertrauensniveau 1 +Fraktionskollegen 1 +Pöttering 1 +Finnische 1 +eingecheckt 1 +Sandstränden 1 +Ozean 1 +blühender 1 +Besichtigungstouren 1 +Auftritt 1 +M 1 +Felgen 1 +Dringlichkeitsthemen 1 +aktiveren 1 +Zentralasien 1 +Vereichnis 1 +c: 1 +ftpboot 1 +\ 1 +fielen 1 +Mücken 1 +blauäugig 1 +Konzeptes 1 +sicherheitskritische 1 +basierenden 1 +generischen 1 +XML-Formates 1 +Testspezifikationen 1 +Testergebnissen 1 +Make 1 +sexuellen 1 +Tätowierung 1 +Ansonsten 1 +Parteitage 1 +Ausgedehntheit 1 +effizient 1 +IT 1 +business-kritischen 1 +Abläufe 1 +Logistik 1 +IT-Partner 1 +outgesourct 1 +Redezeit 1 +Kameras 1 +Arbeitsbeziehung 1 +D.C. 
1 +www.pmi.org 1 +PRINCE2 1 +Speisekarte 1 +ausgezeichnete 1 +Fischspezialitäten 1 +traditionelle 1 +dalmatinische 1 +Weinen 1 +Sir 1 +Leon 1 +Brittan 1 +konsultiert 1 +klargemacht 1 +Genitalverstümmelungen 1 +unannehmbar 1 +derartiger 1 +Übergriff 1 +Schäden 1 +Aussäen 1 +Gräsern 1 +Klee 1 +Feldgrenzen 1 +Rosenhecken 1 +Besitz 1 +Reiher 1 +Wassersümpfen 1 +Besen 1 +Vernichtung 1 +wegfegen 1 +Heerscharen 1 +Privatinvestitionen 1 +verlangsamen 1 +Löhnen 1 +senkt 1 +eingehenderem 1 +offenbaren 1 +Zweideutigkeit 1 +Realitätsferne 1 +5,00 1 +Haarlem 1 +Stränden 1 +Stationen 1 +schnitt 1 +Minuten), 1 +Minuten). 1 +Surflehrer 1 +Talents 1 +großartige 1 +Persönlichkeiten 1 +Pornografie 1 +bedenklich 1 +Private 1 +75,00 1 +Anlaß 1 +anzuknüpfen; 1 +Maquisapayoj 1 +erhöhte 1 +Übernachtungsplattform 1 +Tapire 1 +beobachtet 1 +inmitten 1 +dichten 1 +Primärwaldes 1 +Ding 1 +static 1 +rigging 1 +hochgezogen 1 +Spinnenetz 1 +eingewoben 1 +aufgehängt 1 +Immobilien-Suchmaschine 1 +Objekten 1 +finden: 1 +Möblierte 1 +nichtmöblierte 1 +Preisen 1 +Luxuswohnungen 1 +Wohnungsgemeinschaften 1 +Videokonvertierung 1 +Macs 1 +müßten 1 +einfallen 1 +Foyers 1 +seitlich 1 +Plenarsaals 1 +Pietikäinen 1 +erschöpfend 1 +überstürzt 1 +Textil- 1 +Bekleidungsindustrie 1 +Bananenproblematik 1 +leitete 1 +diverse 1 +Teilprojekte 1 +Großveranstaltungen 1 +koordinierte 1 +Werbemittel 1 +Division 1 +Day 1 +Rund 1 +DDD 1 +Pool 1 +hervorragender 1 +Simulator 1 +virtuellen 1 +Anwendern 1 +gerühmt 1 +modularen 1 +EZ 1 +attraktiven 1 +Wasserzugang 1 +*) 1 +Instandhaltung 1 +zusammenfließen 1 +Koordination 1 +Marktüberwachung 1 +Preisgarantie 1 +haltbaren 1 +Sonderangebote 1 +Frische-Artikel 1 +kühlpflichtig 1 +Großhandelspreise 1 +höheren 1 +einkommensschwachen 1 +mittlerem 1 +Hochlohnländern 1 +gesteckten 1 +Fluggesellschaften 1 +Freikaufen 1 +Estudio 1 +Versicherungspolice 1 +ausgearbeitet 1 +Kursteilnehmern 1 +(von 1 +75 1 +J.) 
1 +Versicherungsschutz 1 +befremdet 1 +brutale 1 +Zugangs 1 +betreffendes 1 +Low 1 +% 1 +-Run 1 +aufzusammeln 1 +darfst 1 +Display 1 +TZ7 1 +HDMI-Schnittstelle 1 +Monitor 1 +anschließen 1 +Verfahrens 1 +Sachlage 1 +Koordinationsausschusses 1 +4-Zimmer- 1 +Neubauwohnungen 1 +erfreuen 1 +steigender 1 +UART 1 +0x80 1 +0xff 1 +Fonts 1 +InternalRAM 1 +mache 1 +Entwicklungsprojekten 1 +übergreifenden 1 +ID: 1 +PDI 1 +20100908 1 +PDI5153 1 +Credit: 1 +Look 1 +picturedesk.com 1 +Date: 1 +08.09.2010 1 +Title: 1 +Caption: 1 +FIRST 1 +LOOK 1 +FLO 1 +Neues 1 +umweltfeindlichen 1 +antieuropäischen 1 +gängigen 1 +Untersuchungen 1 +Qualitätssicherung 1 +fertigen 1 +Asphaltschichten 1 +zerstörungsfreie 1 +Untersuchungsmethoden 1 +Troxler-Sonde) 1 +Einbaubedingungen 1 +Noch 1 +kämpften 1 +vermittelt 1 +Freihandel 1 +verschärfenden 1 +umkehrbar 1 +Echosignale 1 +McAvan 1 +Olympus 1 +Wartung 1 +Fehlerbehebung 1 +minimaler 1 +Ausfallzeit 1 +optimaler 1 +Betriebszeit 1 +Account 1 +anzumelden 1 +vooch 1 +Konto 1 +verknüpfen 1 +direkter 1 +legislativer 1 +Direktoriumsmitglieder 1 +ausgeübt 1 +Haushaltsausschuss 1 +Globalisierungsfonds 1 +Überwindung 1 +infolge 1 +Verlusts 1 +gründlich 1 +Mercedes 1 +stattet 1 +Plakatkampagne 1 +GLK 1 +Riesenposter 1 +beamzones 1 +Wohnungen 1 +Stadtkern 1 +offentlichen 1 +Ausstellungsbereich 1 +c 1 +pop 1 +Professionals 1 +Kreativwirtschaft 1 +sympathische 1 +geraumer 1 +erfolgreicher 1 +operationell 1 +energiebedingter 1 +Sicherheitsprobleme: 1 +Einerseits 1 +Energienutzung 1 +andererseits 1 +bloeiende 1 +perelaar 1 +Blühende 1 +Birnbaum) 1 +Varianten 1 +verschieden 1 +Gefühlsfrische 1 +feinen 1 +Tonart 1 +u.E. 1 +Meisterschaft 1 +pure 1 +poetische 1 +Emotion 1 +erhaben 1 +Direct-Entry 1 +Casting 1 +eingeladen 1 +Jury 1 +Preispaket 1 +100.000 1 +erwünschte 1 +erwartete 1 +eigenständiges 1 +wirtschaftspolitisches 1 +Durchschnitt 1 +Eurozone 1 +sank 1 +aufrechtzuerhaltenden 1 +Baubooms 1 +einstelliges 1 +Binäre 1 +Wahrnehmungs- 1 +Denkkorsette 1 +translatorischen 1 +Mehrpoligkeit 1 +abgeworfen 1 +Generika-Sektor 1 +Krönung 1 +engstirniger 1 +Bedeutung: 1 +Differenzen 1 +Irakdebatte 1 +derart 1 +offenkundig 1 +Genugtuung 1 +ausführt 1 +Rückseite 1 +Gerätes 1 +Befestigung 1 +Horizont 1 +engeren 1 +Lateinamerika 1 +VFX 1 +Buildern 1 +Farbverlauf 1 +Seitenrahmens 1 +direct 1 +CTI-Lösung 1 +automatisiert 1 +Theorien 1 +konzipieren 1 +AWF-Fehlern 1 +RWT-Fehlern 1 +Sagen 1 +Pyrrhussiege 1 +Baum 1 +unterhalb 1 +Ast 1 +Flüchtlingsdilemma 1 +fließen 1 +Instrumentarium 1 +PHARE-Programm 1 +Empfängerseite 1 +Jesus 1 +Propheten 1 +bezeugt 1 +Großbritanniens 1 +Bundesrepublik 1 +Aznar 1 +Selbstbehandlung 1 +Depressionen 1 +Schlafstörungen 1 +Selbstmedikation 1 +Johanniskraut 1 +Schmerzmitteln 1 +Schlafmitteln 1 +Balzan-Stiftung 1 +1956 1 +Lugano 1 +Angela 1 +Lina 1 +beträchtliche 1 +Vermögen 1 +Vater 1 +Eugenio 1 +geerbt 1 +Andenkens 1 +überliess 1 +Sambia 1 +benannt 1 +Fluss 1 +Zambesi 1 +weiter: 1 +Grundstücke 1 +eingeteilt 1 +Erbteilen 1 +trachten 1 +überraschenderweise 1 +ungarische 1 +BSE 1 +meinten 1 +Tierkörper 1 +Serbiens 1 +vorbereiteten 1 +Artikeln 1 +119 1 +Suchmaschinenoptimierung 1 +signifikanter 1 +Online 1 +Marketings 1 +Berücksichtigen 1 +Whitestream-Projekt 1 +(Kaspisches 1 +Meer-Georgien-Schwarzes 1 +Meer-Ukraine-Rumänien)? 
1 +Vorwand 1 +Profite 1 +AroMed 1 +befüllt 1 +Intervallen 1 +Aufheizen 1 +Befüllen 1 +Terrorangriffe 1 +Tabu 1 +heranwagen: 1 +Fans 1 +128,9 1 +hauseigenen 1 +Fahrrädern 1 +Motorrollern 1 +Faust 1 +übernahmen 1 +Variation 1 +englischen 1 +angeglichen 1 +crabes 1 +3-Sterne-Superior-Hotel 1 +Lahn 1 +Kurparks 1 +einschlägigen 1 +Energiesektor 1 +nachhaltiger 1 +grüner 1 +Klimaschutzziele 1 +Live-Stream 1 +SIM 1 +Goethe-Instituts 1 +SL 1 +mitverfolgen 1 +18.00 1 +CEST 1 +17.00 1 +Ortszeit 1 +Mehrjährigen 1 +Ausrichtungsprogramme 1 +Corbett 1 +ĺñigo 1 +Méndez 1 +Vigo 1 +Partnern 1 +Parlamenten 1 +dialogfähig 1 +ALDE-Fraktion 1 +(FI) 1 +Schmitt 1 +annehmbare 1 +Alternative 1 +Kapverden 1 +Rica 1 +dualen 1 +Ausbildungsgangs 1 +angerechnet 1 +jüngste 1 +Pakistan: 1 +wachsende 1 +Intoleranz 1 +wovon 1 +hörten 1 +Fehlen 1 +Ehrenmorde 1 +zurückgeben 1 +konstruktive 1 +Afghanistan 1 +Anfänge 1 +lagen 1 +fehl 1 +argumentierten 1 +westlichen 1 +Rationalismus 1 +Stätten 1 +greifbares 1 +Kulturerbe 1 +medizinische 1 +Umfeldlogistik 1 +engagierten 1 +erfahrenen 1 +Medizinern 1 +Ferienort 1 +Baqueira 1 +Skilehrer 1 +3-Sterne 1 +Ayamonte 1 +suite 1 +Dumra 1 +(etwa 1 +halbe 1 +Strecke) 1 +Besisahar 1 +Besondere 1 +Kreditaufnahmen 1 +Eigenheimsektor 1 +Karriere 1 +Hubert 1 +Gerstmayr 1 +Finanzabteilung 1 +Walter 1 +International 1 +wechselte 1 +Prisma 1 +Kreditversicherungs 1 +NetImage 1 +Bildverarbeitung 1 +diesbezüglicher 1 +gewidmet 1 +Fordern 1 +unverbindliches 1 +Urlaubsangebot 1 +Ausnahmen 1 +restriktiv 1 +Sison 1 +9,7% 1 +Abweichungen 1 +Beschaffenheit 1 +eingesetzten 1 +Materials 1 +Zulieferanten 1 +entgegnen 1 +Davon 1 +höre 1 +überzeugen 1 +• 1 +interessierte 1 +Kursinhalte 1 +e-learning 1 +Jedes 1 +Rabattcode 1 +mittelfristig 1 +strategisches 1 +angesehen 1 +Institutionelle 1 +Zielsetzungen 1 +kürzer 1 +viertel 1 +Letzteren 1 +demokratische 1 +Legitimation 1 +Wolle 1 +landwirtschaftlichem 1 +Erzeugnis 1 +Textilsektor 1 +Wollerzeugung 1 +Klimapolitik 1 +Post- 1 +Kurierservice 1 +Auskünften 1 +strukturiert 1 +klimatisierten 1 +Gnokii 1 +einfacher 1 +zuverlässiger 1 +SyncML 1 +FAll 1 +Uneindeutigkeiten 1 +Ambiguitäten 1 +Künstlerin 1 +zutage 1 +Reflektionsweisen 1 +117 1 +Lehrlinge 1 +Bundesländern 1 +Bundeslehrlingswettbewerb 1 +Tourismusberufe 1 +GAST 1 +Können 1 +Bombardierung 1 +Tanklastwagen 1 +Afghanistans 1 +Bundeswehr 1 +Hagel 1 +Alliierten 1 +Smet 1 +Arbeitszeitgestaltung 1 +Menrad 1 +Ziel-1-Region 1 +strenger 1 +Beibehaltung 1 +-Kriteriums 1 +statistisch 1 +verliert 1 +großzügiges 1 +Phasing-out 1 +Entwicklungsprobleme 1 +systematischen 1 +Kompetenzmanagements 1 +bewältigen 1 +Einbindung 1 +Produktionsabläufe 1 +transnationale 1 +Wertschöpfungsprozesse 1 +Berichterstattern 1 +Azzolini 1 +Riis-Jørgensen 1 +Motorrad 1 +Mountainbikevermietung 1 +Gomera 1 +schließlich: 1 +Steuerharmonisierung 1 +blockieren 1 +Module 1 +Downloadbereich 1 +Drupal-Module 1 +Notizen 1 +Vertretern 1 +Vertreters 1 +Geldern 1 +Grundstücken 1 +Wohnraumbeschaffung 1 +Maastrichter 1 +Kriterien 1 +Située 1 +Médina 1 +Essaouira 1 +Dar 1 +Qu 1 +Ciel 1 +invite 1 +profiter 1 +sérénité 1 +qui 1 +se 1 +dégage 1 +cette 1 +ancienne 1 +maison 1 +entierrement 1 +rénovée 1 +Verschreibung 1 +behandelnden 1 +Wartezeiten 1 +Partie 1 +-1 1 +errno 1 +Argumente 1 +Stabilitätspakt 1 +handhaben 1 +Konjunkturzyklen 1 +appraisal 1 +valuation 1 +Schottland 1 +10.10 1 +unterbrochen 1 +10.35 1 +FГ 1 +¤ 1 +higkeit 1 +fГјr 1 +gegebene 1 +Schnitt 1 +BerГјcksichtigung 1 +SchГ 1 +¶ 1 +nheiten 1 +Hauptkomponenten 1 +Tischlerkunst 1 +Online-Marketings 1 
+Ausschreibungen 1 +betrachten 1 +Familienskirennen 1 +Hasliberg 1 +Sollen 1 +Norton 1 +Security 1 +Malware 1 +PUPS 1 +(Possibly 1 +UnPopular 1 +Software) 1 +Hilft 1 +Beinen 1 +Restless 1 +Legs 1 +Syndrom 1 +(RLS), 1 +Hitzewallungen 1 +Rückenschmerzen 1 +Parlamentspräsidenten 1 +übermitteln 1 +Geschäftstätigkeiten: 1 +Führende 1 +Software-Entwicklungsfirma 1 +patentierte 1 +Dynamic 1 +vertreibt 1 +Web-Service 1 +Hoteliers 1 +Auskunft 1 +Brava 1 +Board-Administration 1 +Dateitypen 1 +Wohnung 1 +Formalitäten 1 +Einblick 1 +Büro-Hochhaus 1 +Zeitungen 1 +transatlantische 1 +familiär 1 +geführtem 1 +Gasthof 1 +Summe 1 +Halbtönen 1 +(z. 1 +do-re) 1 +Intervalle 1 +existieren 1 +Dur 1 +Moll 1 +fair 1 +übermäßige 1 +verminderte 1 +hinterfragt 1 +Spezialisierung 1 +Design-Revolution 1 +Neuerfindungen 1 +Raumschiff 1 +steuern 1 +Erlass 1 +Sonderdarlehen 1 +schwächsten 1 +Lomé-Abkommen 1 +ÖRK 1 +Kirchen 1 +gemeinsamem 1 +spirituellem 1 +theologischer 1 +Reflektion 1 +zusammenzukommen 1 +Hinweisen 1 +Memoiren 1 +autorisierten 1 +Transportunternehmen 1 +Gefahrguttransport 1 +Strahlenquellen 1 +Kohäsionspolitik 1 +Temperatur 1 +abschaltbare 1 +Vereinigung) 1 +Rohrleitung 1 +Kühlschrankes-Kondensators 1 +abgeschaltet 1 +herausgezogen 1 +beladene 1 +EGO 1 +North 1 +Sails 1 +Kontroll- 1 +Bewertungsverfahren 1 +inwieweit 1 +Aufgabenbeschreibung 1 +fortlaufenden 1 +Vorausplanung 1 +Jahresarbeitsplan 1 +vorgegebenen 1 +Bangladesch 1 +Musterschüler 1 +Diesen 1 +anerkennen 1 +(La 1 +séance 1 +levée 1 +50) 1 +Marktsegment 1 +vorenthalten 1 +geblieben 1 +Belebung 1 +Ernährungssektors 1 +traditioneller 1 +Verbrauchern 1 +hochwertige 1 +Volksvertreter 1 +Erhebung 1 +erlassen 1 +Wachstumsmöglichkeiten 1 +serbische 1 +Post 1 +T 1 +SAP 1 +Implementierungspartner 1 +unternehmensweite 1 +Business-Transformation-Strategie 1 +Lieferqualität 1 +schmerzfreien 1 +Ruheraum 1 +offerierten 1 +Gipfeli 1 +Heimweg 1 +Diskriminierungen 1 +israelischer 1 +Rechten 1 +arabische 1 +Konkretisierung 1 +spreche 1 +Berlusconi 1 +sechsmonatiger 1 +Fiasko 1 +endete 1 +kommenden 1 +Aktuelle 1 +Bewertungen 1 +Oberndorf 1 +Tirol 1 +Tarif 1 +(CA) 1 +Shanghai 1 +(FM) 1 +Festlands 1 +Bewährt 1 +Reihendoseure 1 +Modellreihe 1 +RD 1 +erlangte 1 +KŠB 1 +Renommee 1 +unabhängigen 1 +Kanzlei 1 +Anwaltsfirmen 1 +konkurrenzfähig 1 +Kakaopflanze 1 +(theobroma 1 +cacao) 1 +Sterkulazeen 1 +gedeiht 1 +klimatischer 1 +feuchtwarmer 1 +Umgebung: 1 +Kamerun 1 +Elfenbeinküste 1 +Ghana 1 +Malaysia 1 +Theater 1 +Piccadilly 1 +Covent 1 +Graden 1 +Nordamerika 1 +Volumenmärkten 1 +hochwertigen 1 +Qualitätsflachstahl 1 +Camaleon 1 +Sports 1 +activen 1 +Fahrrad 1 +Treckingtouren 1 +Cadiz 1 +Vorhänge 1 +Tapeten 1 +feingewebtem 1 +Brokat 1 +hüllen 1 +strahlendes 1 +Ambiente 1 +Fenstern 1 +gewisse 1 +einzelstaatlichen 1 +abzubauen 1 +ungleichen 1 +Personen- 1 +Güterverkehrsmarkt 1 +Skype 1 +Ermessen 1 +API-Bedingungen 1 +hierunter 1 +jedwede 1 +Drittpartei 1 +abtreten 1 +getreten 1 +Glaube 1 +Verantwortungsmechanismus 1 +Erlebnisse 1 +Sicherheitsbehörden 1 +andernorts 1 +liefern 1 +vergleichbarer 1 +Rechenschaftspflicht 1 +begangenen 1 +Ausschluß 1 +Erzeugers 1 +gemeinhin 1 +Entwicklungsrisiko 1 +dvdisaster 1 +Betriebssysteme 1 +Darwin 1 +Mac 1 +OS 1 +FreeBSD 1 +GNU 1 +NetBSD 1 +Verspätung 1 +Tierschutz 1 +Produktqualität 1 +zufriedenstellend 1 +Helsinki 1 +Kerns 1 +weist 1 +greifbaren 1 +Unionsbürgerschaft 1 +Freizügigkeit 1 +komfortabel 1 +separatem 1 +Ferienvilla 1 +sämtlichen 1 +potentielle 1 +Spender 1 +5. 
[... several thousand added vocabulary entries elided — one `+<token> <count>` line per entry; every entry in this tail of the frequency-sorted file has count 1 ...]
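For orientation, the elided file appears to be a generated, frequency-sorted vocabulary: judging only from the surviving fragments (the file's name and diff header are not visible in this excerpt), each line pairs one token with its corpus count. A minimal sketch of the format, using tokens taken verbatim from the elided entries:

```text
Newsgroup 1
Telefonische 1
Gebote 1
```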
+Isaresidence-Gästen 1
+Kontrollarmband 1
+ausgehändigt 1
+Zutritt 1
+Ferienanlage 1
+Isamar 1
+bedeutendsten 1
+Einkaufs- 1
+Unterhaltungsbezirke 1
+Konferenzzentren 1
+Odyssey 1
+Waterfront 1
+Hall 1
+Beispielweise 1
+überbelichtet 1
+Vordergrund 1
+infolgedessen 1
+sichtbar 1
+Detailarmut 1
+Ausdruckskraft 1
+Crespi 1
+Fuer 1
+Sonderangebot 1
+zielen 1
+endet 1
+eBook 1
+Hintergrundmusik 1
+untermalt 1
+Flamencoklänge 1
+durchfluten 1
+fügen 1
+harmonisch 1
+Dekoration 1
+Nachwuchstalente 1
+Vereinen 1
+zuteilen 1
+irreführende 1
+vergleichende 1
+Länder: 1
+(VR), 1
+Gambia 1
+Guinea-Bissau 1
+Hongkong 1
+(China), 1
+Kap 1
+Verde 1
+Liberia 1
+Senegal 1
+Leone 1
+renommierten 1
+Beauty- 1
+Anwendungsmethoden 1
+namhafter 1
+Kosmetikfirmen 1
+geschulten 1
+Fachkräfte 1
+Garanten 1
+Vitalität 1
+Möchten 1
+massgeschneiderte 1
+0amos 1
+Terroranschlag 1
+Vorwürfe 1
+Schiphol 1
+Gepäckaufgabestellen 1
+05.00 1
+Bienen 1
+Nahrungskette: 1
+Verschwinden 1
+Imkerei 1
+Getreide- 1
+Obst- 1
+Nussanbau 1
+Overtone 1
+Analyzer 1
+Obertonsänger 1
+Stücke 1
+einstudieren 1
+genaues 1
+Feedback 1
+Obertöne 1
+gesungen 1
+zusammengefügt 1
+leichten 1
+Belichtungsunterschieden 1
+exakt 1
+belichtet 1
+pragmatischen 1
+übertragbaren 1
+weiterkommen 1
+vordringliche 1
+staatlicher 1
+zugegeben 1
+Riesenproblem 1
+Spiromat 1
+Applet 1
+ausprobieren 1
+Dreimal 1
+Golden 1
+Globe 1
+fünfmal 1
+Endauswahl 1
+Emmy 1
+Drogensüchtigen 1
+Rezidiv 1
+defi 1
+niert 1
+SNA- 1
+SNB- 1
+ML 1
+NL-Winkels 1
+0,5 1
+T1 1
+T2 1
+durchschlagenden 1
+Realitäten 1
+seither 1
+Moratorium 1
+53 1
+grausam 1
+unmenschlich 1
+bezeichneten 1
+vorläufig 1
+ernsten 1
+übernommenen 1
+verschiedener 1
+degenerativer 1
+Alzheimer 1
+Parkinson 1
+explosionsartig 1
+zunimmt 1
diff --git a/docs/source/CONTRIBUTING.md b/docs/source/CONTRIBUTING.md
index d4e4620a1f..7ad1425bdc 100644
--- a/docs/source/CONTRIBUTING.md
+++ b/docs/source/CONTRIBUTING.md
@@ -5,7 +5,7 @@ OpenNMT-py is a community developed project and we love developer contributions.
## Guidelines
Before sending a PR, please do this checklist first:
-- Please run `tools/pull_request_chk.sh` and fix any errors. When adding new functionality, also add tests to this script. Included checks:
+- Please run `onmt/tests/pull_request_chk.sh` and fix any errors. When adding new functionality, also add tests to this script. Included checks:
  1. flake8 check for coding style;
  2. unittest;
  3. continuous integration tests listed in `.travis.yml`.
@@ -20,10 +20,12 @@ This makes it easy to include your contributions in the Sphinx documentation. An
to autodoc your contributions in the API ``.rst`` files in the `docs/source` folder! If you do, check that your additions look right.
+**How to build the docs locally?**
```bash
cd docs
# install some dependencies if necessary:
# recommonmark, sphinx_rtd_theme, sphinxcontrib-bibtex
+pip install -r requirements.txt
make html
firefox build/html/main.html # or your browser of choice
```
diff --git a/docs/source/FAQ.md b/docs/source/FAQ.md
index 403afb3e04..6c35393544 100644
--- a/docs/source/FAQ.md
+++ b/docs/source/FAQ.md
@@ -1,90 +1,116 @@
-# FAQ
+
+All the example YAML configurations are partial. For an overview of the YAML configuration format, you can start by reading the [Quickstart](quickstart) section.
## How do I use Pretrained embeddings (e.g. GloVe)?
-Using vocabularies from OpenNMT-py preprocessing outputs, `embeddings_to_torch.py` to generate encoder and decoder embeddings initialized with GloVe's values.
+This is handled in the initial steps of the `onmt_train` execution.
+
+Pretrained embeddings can be configured in the main YAML configuration file.
-the script is a slightly modified version of ylhsieh's one2.
+### Example
-Usage:
+1. Get GloVe files:
-```shell
-embeddings_to_torch.py [-h] [-emb_file_both EMB_FILE_BOTH]
-                       [-emb_file_enc EMB_FILE_ENC]
-                       [-emb_file_dec EMB_FILE_DEC] -output_file
-                       OUTPUT_FILE -dict_file DICT_FILE [-verbose]
-                       [-skip_lines SKIP_LINES]
-                       [-type {GloVe,word2vec}]
+```bash
+mkdir "glove_dir"
+wget http://nlp.stanford.edu/data/glove.6B.zip
+unzip glove.6B.zip -d "glove_dir"
```
+
+2. Adapt the configuration:
-Run embeddings_to_torch.py -h for more complete usage info.
+```yaml
+# <your_config>.yaml
-### Example
+
-1. Get GloVe files:
+...
+
+# this means embeddings will be used for both encoder and decoder sides
+both_embeddings: glove_dir/glove.6B.100d.txt
+# to set src and tgt embeddings separately:
+# src_embeddings: ...
+# tgt_embeddings: ...
+
+# supported types: GloVe, word2vec
+embeddings_type: "GloVe"
+
+# word_vec_size needs to match the pretrained embeddings dimensions
+word_vec_size: 100
+
+```
+
+3. Train:
-   ```shell
-   mkdir "glove_dir"
-   wget http://nlp.stanford.edu/data/glove.6B.zip
-   unzip glove.6B.zip -d "glove_dir"
-   ```
-
-2. Prepare data:
-
-   ```shell
-   onmt_preprocess \
-   -train_src data/train.src.txt \
-   -train_tgt data/train.tgt.txt \
-   -valid_src data/valid.src.txt \
-   -valid_tgt data/valid.tgt.txt \
-   -save_data data/data
-   ```
-
-3. Prepare embeddings:
-
-   ```shell
-   ./tools/embeddings_to_torch.py -emb_file_both "glove_dir/glove.6B.100d.txt" \
-   -dict_file "data/data.vocab.pt" \
-   -output_file "data/embeddings"
-   ```
-
-4. Train using pre-trained embeddings:
-
-   ```shell
-   onmt_train -save_model data/model \
-   -batch_size 64 \
-   -layers 2 \
-   -rnn_size 200 \
-   -word_vec_size 100 \
-   -pre_word_vecs_enc "data/embeddings.enc.pt" \
-   -pre_word_vecs_dec "data/embeddings.dec.pt" \
-   -data data/data
-   ```
+```bash
+onmt_train -config <your_config>.yaml
+```
+
+Notes:
+- the matched embeddings will be saved at `<save_data>.enc_embeddings.pt` and `<save_data>.dec_embeddings.pt`;
+- additional flags `fix_word_vecs_enc` and `fix_word_vecs_dec` are available to freeze the embeddings.
## How do I use the Transformer model?
The transformer model is very sensitive to hyperparameters. To run it
-effectively you need to set a bunch of different options that mimic the Google
-setup. We have confirmed the following command can replicate their WMT results.
-
-```shell
-python train.py -data /tmp/de2/data -save_model /tmp/extra \
-        -layers 6 -rnn_size 512 -word_vec_size 512 -transformer_ff 2048 -heads 8 \
-        -encoder_type transformer -decoder_type transformer -position_encoding \
-        -train_steps 200000 -max_generator_batches 2 -dropout 0.1 \
-        -batch_size 4096 -batch_type tokens -normalization tokens -accum_count 2 \
-        -optim adam -adam_beta2 0.998 -decay_method noam -warmup_steps 8000 -learning_rate 2 \
-        -max_grad_norm 0 -param_init 0 -param_init_glorot \
-        -label_smoothing 0.1 -valid_steps 10000 -save_checkpoint_steps 10000 \
-        -world_size 4 -gpu_ranks 0 1 2 3
+effectively you need to set a bunch of different options that mimic the [Google](https://arxiv.org/abs/1706.03762) setup. We have confirmed the following configuration can replicate their WMT results.
+
+```yaml
+
+...
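+
+# NB: as stated above, this example configuration is partial. The `data:`
+# corpora and the `src_vocab`/`tgt_vocab` entries (see the Quickstart) are
+# assumed to be defined in place of the ellipsis above.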
+
+# General opts
+save_model: foo
+save_checkpoint_steps: 10000
+valid_steps: 10000
+train_steps: 200000
+
+# Batching
+queue_size: 10000
+bucket_size: 32768
+world_size: 4
+gpu_ranks: [0, 1, 2, 3]
+batch_type: "tokens"
+batch_size: 4096
+valid_batch_size: 8
+max_generator_batches: 2
+accum_count: [4]
+accum_steps: [0]
+
+# Optimization
+model_dtype: "fp32"
+optim: "adam"
+learning_rate: 2
+warmup_steps: 8000
+decay_method: "noam"
+adam_beta2: 0.998
+max_grad_norm: 0
+label_smoothing: 0.1
+param_init: 0
+param_init_glorot: true
+normalization: "tokens"
+
+# Model
+encoder_type: transformer
+decoder_type: transformer
+position_encoding: true
+enc_layers: 6
+dec_layers: 6
+heads: 8
+rnn_size: 512
+word_vec_size: 512
+transformer_ff: 2048
+dropout_steps: [0]
+dropout: [0.1]
+attention_dropout: [0.1]
```
-Here are what each of the parameters mean:
+Here is what the most important parameters mean:
-* `param_init_glorot` `-param_init 0`: correct initialization of parameters
-* `position_encoding`: add sinusoidal position encoding to each embedding
-* `optim adam`, `decay_method noam`, `warmup_steps 8000`: use special learning rate.
-* `batch_type tokens`, `normalization tokens`, `accum_count 4`: batch and normalize based on number of tokens and not sentences. Compute gradients based on four batches.
+* `param_init_glorot` & `param_init 0`: correct initialization of parameters;
+* `position_encoding`: add sinusoidal position encoding to each embedding;
+* `optim adam`, `decay_method noam`, `warmup_steps 8000`: use a special learning rate schedule;
+* `batch_type tokens`, `normalization tokens`: batch and normalize based on number of tokens and not sentences;
+* `accum_count 4`: compute gradients based on four batches;
* `label_smoothing 0.1`: use label smoothing loss.
## Do you support multi-gpu?
@@ -95,67 +121,299 @@ If you want to use GPU id 1 and 3 of your OS, you will need to `export CUDA_VISI
Both `-world_size` and `-gpu_ranks` need to be set. E.g. `-world_size 4 -gpu_ranks 0 1 2 3` will use 4 GPU on this node only.
+**Warning - Deprecated**
+
+Multi-node distributed training is not properly implemented in OpenNMT-py 2.0 yet.
+
If you want to use 2 nodes with 2 GPU each, you need to set `-master_ip` and `-master_port`, and
* `-world_size 4 -gpu_ranks 0 1`: on the first node
* `-world_size 4 -gpu_ranks 2 3`: on the second node
* `-accum_count 2`: This will accumulate over 2 batches before updating parameters.
-if you use a regular network card (1 Gbps) then we suggest to use a higher `-accum_count` to minimize the inter-node communication.
+If you use a regular network card (1 Gbps), then we suggest using a higher `-accum_count` to minimize the inter-node communication.
**Note:**
-When training on several GPUs, you can't have them in 'Exclusive' compute mode (`nvidia-smi -c 3`).
+In the legacy version, when training on several GPUs, you couldn't have them in 'Exclusive' compute mode (`nvidia-smi -c 3`).
-The multi-gpu setup relies on a Producer/Consumer setup. This setup means there will be `2 + 1` processes spawned, with 2 processes per GPU, one for model training and one (Consumer) that hosts a `Queue` of batches that will be processed next. The additional process is the Producer, creating batches and sending them to the Consumers. This setup is beneficial for both wall time and memory, since it loads data shards 'in advance', and does not require to load it for each GPU process.
+The multi-gpu setup relied on a Producer/Consumer setup.
This setup means there will be `2 + 1` processes spawned, with 2 processes per GPU, one for model training and one (Consumer) that hosts a `Queue` of batches that will be processed next. The additional process is the Producer, creating batches and sending them to the Consumers. This setup is beneficial for both wall time and memory, since it loads data shards 'in advance', and does not require to load it for each GPU process.
+
+The new codebase allows GPUs to be in exclusive mode, because batches are moved to the device later in the process. Hence, there is no 'producer' process on each GPU.
## How can I ensemble Models at inference?
-You can specify several models in the translate.py command line: -model model1_seed1 model2_seed2
+You can specify several models in the `onmt_translate` command line: `-model model1_seed1 model2_seed2`
Bear in mind that your models must share the same target vocabulary.
## How can I weight different corpora at training?
-### Preprocessing
+This is naturally embedded in the data configuration format introduced in OpenNMT-py 2.0. Each entry of the `data` configuration will have its own *weight*. When building batches, we'll sequentially take *weight* examples from each corpus.
+
+**Note**: don't worry about batch homogeneity/heterogeneity; the pooling mechanism takes care of that. Instead of building batches one at a time, we will load `pool_factor` batches' worth of examples, sort them by length, build batches, and then yield them in a random order.
+
+### Example
-We introduced `-train_ids` which is a list of IDs that will be given to the preprocessed shards.
+In the following example, we will sequentially sample 7 examples from *corpus_1*, and 3 examples from *corpus_2*, and so on:
-E.g. we have two corpora : `parallel.en` and `parallel.de` + `from_backtranslation.en` `from_backtranslation.de`, we can pass the following in the `preprocess.py` command:
+```yaml
+# <your_config>.yaml
-```shell
...
--train_src parallel.en from_backtranslation.en \
--train_tgt parallel.de from_backtranslation.de \
--train_ids A B \
--save_data my_data \
+
+# Corpus opts:
+data:
+    corpus_1:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        weight: 7
+    corpus_2:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        weight: 3
+    valid:
+        path_src: toy-ende/src-val.txt
+        path_tgt: toy-ende/tgt-val.txt
...
+
```
-and it will dump `my_data.train_A.X.pt` based on `parallel.en`//`parallel.de` and `my_data.train_B.X.pt` based on `from_backtranslation.en`//`from_backtranslation.de`.
+## How can I apply on-the-fly tokenization and subword regularization when training?
-### Training
+This is naturally embedded in the data configuration format introduced in OpenNMT-py 2.0. Each entry of the `data` configuration will have its own `transforms`. `transforms` is basically a `list` of functions that will be applied sequentially to the examples as they are read from the files.
-We introduced `-data_ids` based on the same principle as above, as well as `-data_weights`, which is the list of the weight each corpus should have.
-E.g.
+
+### Example
+
+This example applies sentencepiece tokenization with `pyonmttok`, with `nbest=20` and `alpha=0.1`.
+
+```yaml
+# <your_config>.yaml
-```shell
...
--data my_data \
--data_ids A B \
--data_weights 1 7 \
+
+# Tokenization options
+src_subword_type: sentencepiece
+src_subword_model: examples/subword.spm.model
+tgt_subword_type: sentencepiece
+tgt_subword_model: examples/subword.spm.model
+
+# Number of candidates for SentencePiece sampling
+subword_nbest: 20
+# Smoothing parameter for SentencePiece sampling
+subword_alpha: 0.1
+# Specific arguments for pyonmttok
+onmttok_kwargs: "{'mode': 'none', 'spacer_annotate': True}"
+
+# Corpus opts:
+data:
+    corpus_1:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        transforms: [onmt_tokenize]
+        weight: 1
+    valid:
+        path_src: toy-ende/src-val.txt
+        path_tgt: toy-ende/tgt-val.txt
+        transforms: [onmt_tokenize]
...
+
+```
+
+Other tokenization methods and transforms are readily available. See the dedicated docs for more details.
+
+## What are the readily available on-the-fly data transforms?
+
+It's your lucky day! We already embedded several transforms that can be used easily.
+
+Note: all the details about every flag and option for each transform can be found in the [train](#train) section.
+
+### General purpose
+
+#### Filter examples by length
+
+Transform name: `filtertoolong`
+
+Class: `onmt.transforms.misc.FilterTooLongTransform`
+
+The following options can be added to the configuration:
+- `src_seq_length`: maximum source sequence length;
+- `tgt_seq_length`: maximum target sequence length.
+
+#### Add custom prefix to examples
+
+Transform name: `prefix`
+
+Class: `onmt.transforms.misc.PrefixTransform`
+
+For each dataset that the `prefix` transform is applied to, you can set the additional `src_prefix` and `tgt_prefix` parameters in its data configuration:
+
+```yaml
+data:
+    corpus_1:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        transforms: [prefix]
+        weight: 1
+        src_prefix: __some_src_prefix__
+        tgt_prefix: __some_tgt_prefix__
+```
-will mean that we'll look for `my_data.train_A.*.pt` and `my_data.train_B.*.pt`, and that when building batches, we'll take 1 example from corpus A, then 7 examples from corpus B, and so on.
-**Warning**: This means that we'll load as many shards as we have `-data_ids`, in order to produce batches containing data from every corpus. It may be a good idea to reduce the `-shard_size` at preprocessing.
-## Can I get word alignment while translating?
+
+### Tokenization
+
+Common options for the tokenization transforms are the following:
+
+- `src_subword_model`: path of source side (or both if shared) subword model;
+- `tgt_subword_model`: path of target side subword model;
+- `src_subword_nbest`: number of candidates for subword regularization (sentencepiece), source side;
+- `tgt_subword_nbest`: number of candidates for subword regularization (sentencepiece), target side;
+- `src_subword_alpha`: smoothing parameter for sentencepiece regularization / dropout probability for BPE, source side;
+- `tgt_subword_alpha`: smoothing parameter for sentencepiece regularization / dropout probability for BPE, target side.
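+
+For instance, here is a minimal sketch of how these common options could be combined for BPE with dropout. The shared BPE codes path `examples/codes.bpe` is a placeholder for illustration, not a file shipped with the repo:
+
+```yaml
+# <your_config>.yaml
+
+...
+
+# shared BPE codes for both sides (placeholder path)
+src_subword_model: examples/codes.bpe
+tgt_subword_model: examples/codes.bpe
+# dropout probability for BPE
+src_subword_alpha: 0.1
+tgt_subword_alpha: 0.1
+
+data:
+    corpus_1:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        transforms: [bpe]
+        weight: 1
+
+...
+```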
+
+#### [OpenNMT Tokenizer](https://github.com/opennmt/Tokenizer)
+
+Transform name: `onmt_tokenize`
+
+Class: `onmt.transforms.misc.ONMTTokenizerTransform`
+
+Additional options are available:
+- `src_subword_type`: type of subword model for source side (from `["none", "sentencepiece", "bpe"]`);
+- `tgt_subword_type`: type of subword model for target side (from `["none", "sentencepiece", "bpe"]`);
+- `src_onmttok_kwargs`: additional kwargs for pyonmttok Tokenizer class, source side;
+- `tgt_onmttok_kwargs`: additional kwargs for pyonmttok Tokenizer class, target side.
+
+#### [SentencePiece](https://github.com/google/sentencepiece)
+
+Transform name: `sentencepiece`
+
+Class: `onmt.transforms.misc.SentencePieceTransform`
+
+The `src_subword_model` and `tgt_subword_model` should be valid sentencepiece models.
+
+#### BPE ([subword-nmt](https://github.com/rsennrich/subword-nmt))
+
+Transform name: `bpe`
+
+Class: `onmt.transforms.misc.BPETransform`
+
+The `src_subword_model` and `tgt_subword_model` should be valid BPE models.
+
+### BART-style noise
+
+BART-style noise is composed of several parts, as described in [BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension](https://arxiv.org/abs/1910.13461).
+
+These different types of noise can be controlled with the following options:
+
+- `permute_sent_ratio`: proportion of sentences to permute (default boundaries are ".", "?" and "!");
+- `rotate_ratio`: proportion of inputs to permute;
+- `insert_ratio`: proportion of additional random tokens to insert;
+- `random_ratio`: proportion of tokens to replace with random;
+- `mask_ratio`: proportion of words/subwords to mask;
+- `mask_length`: length of masking window (from `["subword", "word", "span-poisson"]`);
+- `poisson_lambda`: $\lambda$ value for Poisson distribution to sample span length (in the case of `mask_length` set to `span-poisson`);
+- `replace_length`: when masking N tokens, replace with 0, 1, or N tokens (set to -1 for N).
+
+### SwitchOut and sampling
+
+#### [SwitchOut](https://arxiv.org/abs/1808.07512)
+
+Transform name: `switchout`
+
+Class: `onmt.transforms.misc.SwitchOutTransform`
+
+Options:
+
+- `switchout_temperature`: sampling temperature for SwitchOut.
+
+#### Drop some tokens
+
+Transform name: `tokendrop`
+
+Class: `onmt.transforms.misc.TokenDropTransform`
+
+Options:
+
+- `tokendrop_temperature`: sampling temperature for token deletion.
+
+#### Mask some tokens
+
+Transform name: `tokenmask`
+
+Class: `onmt.transforms.misc.TokenMaskTransform`
+
+Options:
+
+- `tokenmask_temperature`: sampling temperature for token masking.
+
+## How can I create custom on-the-fly data transforms?
+
+The code is easily extendable with custom transforms inheriting from the `Transform` base class.
+
+You can for instance have a look at the `FilterTooLongTransform` class as a template:
+
+```python
+@register_transform(name='filtertoolong')
+class FilterTooLongTransform(Transform):
+    """Filter out sentence that are too long."""
+
+    def __init__(self, opts):
+        super().__init__(opts)
+        self.src_seq_length = opts.src_seq_length
+        self.tgt_seq_length = opts.tgt_seq_length
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options related to this Transform."""
+        group = parser.add_argument_group("Transform/Filter")
+        group.add("--src_seq_length", "-src_seq_length", type=int, default=200,
+                  help="Maximum source sequence length.")
+        group.add("--tgt_seq_length", "-tgt_seq_length", type=int, default=200,
+                  help="Maximum target sequence length.")
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Return None if too long else return as is."""
+        if (len(example['src']) > self.src_seq_length or
+                len(example['tgt']) > self.tgt_seq_length):
+            if stats is not None:
+                stats.filter_too_long()
+            return None
+        else:
+            return example
+
+    def _repr_args(self):
+        """Return str represent key arguments for class."""
+        return '{}={}, {}={}'.format(
+            'src_seq_length', self.src_seq_length,
+            'tgt_seq_length', self.tgt_seq_length
+        )
+```
+
+Methods:
+- `add_options` allows adding custom options that would be necessary for the transform configuration;
+- `apply` is where the transform happens;
+- `_repr_args` is for clean logging purposes.
+
+As you can see, there is the `@register_transform` wrapper before the class definition. This will allow for the class to be automatically detected (if put in the proper `transforms` folder) and usable in your training configurations through its `name` argument.
+
+The `example` argument of `apply` is a `dict` of the form:
+```
+{
+    "src": <source string>,
+    "tgt": <target string>,
+    "align": <alignment pharaoh string> # optional
+}
+```
+
+This is defined in `onmt.inputters.corpus.ParallelCorpus.load`. This class is not easily extendable for now, but it can be considered for future developments. For instance, we could create some `CustomParallelCorpus` class that would handle other kinds of inputs.
+
+
+## Can I get word alignments while translating?
### Raw alignments from averaging Transformer attention heads
Currently, we support producing word alignment while translating for Transformer based models. Using `-report_align` when calling `translate.py` will output the inferred alignments in Pharaoh format. Those alignments are computed from an argmax on the average of the attention heads of the *second to last* decoder layer. The resulting alignment src-tgt (Pharaoh) will be pasted to the translation sentence, separated by ` ||| `.
-Note: The *second to last* default behaviour was empirically determined. It is not the same as the paper (they take the *penultimate* layer), probably because of light differences in the architecture.
+Note: The *second to last* default behaviour was empirically determined. It is not the same as the paper (they take the *penultimate* layer), probably because of slight differences in the architecture.
* alignments use the standard "Pharaoh format", where a pair `i-j` indicates the ith word of source language is aligned to jth word of target language.
* Example: {'src': 'das stimmt nicht !'; 'output': 'that is not true ! ||| 0-0 0-1 1-2 2-3 1-4 1-5 3-6'}
@@ -167,16 +425,37 @@ Note: The *second to last* default behaviour was empirically determined. It is n
The quality of output alignments can be further improved by providing reference alignments while training.
This will invoke multi-task learning on translation and alignment. This is an implementation based on the paper [Jointly Learning to Align and Translate with Transformer Models](https://arxiv.org/abs/1909.02074). The data need to be preprocessed with the reference alignments in order to learn the supervised task.
+The reference alignment file(s) can for instance be generated by [GIZA++](https://github.com/moses-smt/mgiza/) or [fast_align](https://github.com/clab/fast_align).
-When calling `preprocess.py`, add:
+In order to learn the supervised task, you can set for each dataset the path of its alignment file in the YAML configuration file:
-* `--train_align <path>`: path(s) to the training alignments in Pharaoh format
-* `--valid_align <path>`: path to the validation set alignments in Pharaoh format (optional).
-The reference alignment file(s) could be generated by [GIZA++](https://github.com/moses-smt/mgiza/) or [fast_align](https://github.com/clab/fast_align).
+```yaml
+# <your_config>.yaml
+
+...
+
+# Corpus opts:
+data:
+    corpus_1:
+        path_src: toy-ende/src-train1.txt
+        path_tgt: toy-ende/tgt-train1.txt
+        # src - tgt alignments in pharaoh format
+        path_align: toy-ende/src-tgt.align
+        transforms: []
+        weight: 1
+    valid:
+        path_src: toy-ende/src-val.txt
+        path_tgt: toy-ende/tgt-val.txt
+        transforms: []
+
+...
+```
-Note: There should be no blank lines in the alignment files provided.
+**Notes**:
+- Most of the transforms are currently incompatible with the joint alignment learning pipeline, because most of them make modifications at the token level, which would invalidate the alignments.
+- There should be no blank lines in the alignment files provided.
-Options to learn such alignments are:
+Training options to learn such alignments are:
* `-lambda_align`: set the value > 0.0 to enable joint align training, the paper suggests 0.05;
* `-alignment_layer`: indicate the index of the decoder layer;
diff --git a/docs/source/Library.ipynb b/docs/source/Library.ipynb
deleted file mode 100644
index d0b9c75bd4..0000000000
--- a/docs/source/Library.ipynb
+++ /dev/null
@@ -1,319 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import torch\n",
-    "import torch.nn as nn\n",
-    "\n",
-    "import onmt\n",
-    "import onmt.inputters\n",
-    "import onmt.modules\n",
-    "import onmt.utils"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We begin by loading in the vocabulary for the model of interest. This will let us check vocab size and to get the special ids for padding."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "vocab = dict(torch.load(\"../../data/data.vocab.pt\"))\n",
-    "src_padding = vocab[\"src\"].stoi[onmt.inputters.PAD_WORD]\n",
-    "tgt_padding = vocab[\"tgt\"].stoi[onmt.inputters.PAD_WORD]"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Next we specify the core model itself. Here we will build a small model with an encoder and an attention based input feeding decoder. Both models will be RNNs and the encoder will be bidirectional"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "emb_size = 10\n",
-    "rnn_size = 6\n",
-    "# Specify the core model. 
\n", - "encoder_embeddings = onmt.modules.Embeddings(emb_size, len(vocab[\"src\"]),\n", - " word_padding_idx=src_padding)\n", - "\n", - "encoder = onmt.encoders.RNNEncoder(hidden_size=rnn_size, num_layers=1, \n", - " rnn_type=\"LSTM\", bidirectional=True,\n", - " embeddings=encoder_embeddings)\n", - "\n", - "decoder_embeddings = onmt.modules.Embeddings(emb_size, len(vocab[\"tgt\"]),\n", - " word_padding_idx=tgt_padding)\n", - "decoder = onmt.decoders.decoder.InputFeedRNNDecoder(hidden_size=rnn_size, num_layers=1, \n", - " bidirectional_encoder=True,\n", - " rnn_type=\"LSTM\", embeddings=decoder_embeddings)\n", - "model = onmt.models.model.NMTModel(encoder, decoder)\n", - "\n", - "# Specify the tgt word generator and loss computation module\n", - "model.generator = nn.Sequential( \n", - " nn.Linear(rnn_size, len(vocab[\"tgt\"])), \n", - " nn.LogSoftmax())\n", - "loss = onmt.utils.loss.NMTLossCompute(model.generator, vocab[\"tgt\"]) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we set up the optimizer. This could be a core torch optim class, or our wrapper which handles learning rate updates and gradient normalization automatically." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "optim = onmt.utils.optimizers.Optimizer(method=\"sgd\", lr=1, max_grad_norm=2)\n", - "optim.set_parameters(model.named_parameters())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we load the data from disk. Currently will need to call a function to load the fields into the data as well. " - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "# Load some data\n", - "data = torch.load(\"../../data/data.train.1.pt\")\n", - "valid_data = torch.load(\"../../data/data.valid.1.pt\")\n", - "data.load_fields(vocab)\n", - "valid_data.load_fields(vocab)\n", - "data.examples = data.examples[:100] " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To iterate through the data itself we use a torchtext iterator class. We specify one for both the training and test data. " - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "train_iter = onmt.inputters.OrderedIterator( \n", - " dataset=data, batch_size=10, \n", - " device=-1, \n", - " repeat=False)\n", - "valid_iter = onmt.inputters.OrderedIterator( \n", - " dataset=valid_data, batch_size=10, \n", - " device=-1,\n", - " train=False) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Finally we train." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 0, 0/ 10; acc: 0.00; ppl: 1225.23; 1320 src tok/s; 1320 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 1/ 10; acc: 9.50; ppl: 996.33; 1188 src tok/s; 1194 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 2/ 10; acc: 16.51; ppl: 694.48; 1265 src tok/s; 1267 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 3/ 10; acc: 20.49; ppl: 470.39; 1459 src tok/s; 1420 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 4/ 10; acc: 22.68; ppl: 387.03; 1511 src tok/s; 1462 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 5/ 10; acc: 24.58; ppl: 345.44; 1625 src tok/s; 1509 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 6/ 10; acc: 25.37; ppl: 314.39; 1586 src tok/s; 1493 tgt tok/s; 1514090454 s elapsed\n", - "Epoch 0, 7/ 10; acc: 26.14; ppl: 291.15; 1593 src tok/s; 1520 tgt tok/s; 1514090455 s elapsed\n", - "Epoch 0, 8/ 10; acc: 26.32; ppl: 274.79; 1606 src tok/s; 1545 tgt tok/s; 1514090455 s elapsed\n", - "Epoch 0, 9/ 10; acc: 26.83; ppl: 247.32; 1669 src tok/s; 1614 tgt tok/s; 1514090455 s elapsed\n", - "Validation\n", - "Epoch 0, 11/ 10; acc: 13.41; ppl: 111.94; 0 src tok/s; 7329 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 0/ 10; acc: 6.59; ppl: 147.05; 1849 src tok/s; 1743 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 1/ 10; acc: 22.10; ppl: 130.66; 2002 src tok/s; 1957 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 2/ 10; acc: 20.16; ppl: 122.49; 1748 src tok/s; 1760 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 3/ 10; acc: 23.52; ppl: 117.41; 1690 src tok/s; 1698 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 4/ 10; acc: 24.16; ppl: 119.42; 1647 src tok/s; 1662 tgt tok/s; 1514090464 s elapsed\n", - "Epoch 1, 5/ 10; acc: 25.44; ppl: 115.31; 1775 src tok/s; 1709 tgt tok/s; 1514090465 s elapsed\n", - "Epoch 1, 6/ 10; acc: 24.05; ppl: 115.11; 1780 src tok/s; 1718 tgt tok/s; 1514090465 s elapsed\n", - "Epoch 1, 7/ 10; acc: 25.32; ppl: 109.59; 1799 src tok/s; 1765 tgt tok/s; 1514090465 s elapsed\n", - "Epoch 1, 8/ 10; acc: 25.14; ppl: 108.16; 1771 src tok/s; 1734 tgt tok/s; 1514090465 s elapsed\n", - "Epoch 1, 9/ 10; acc: 25.58; ppl: 107.13; 1817 src tok/s; 1757 tgt tok/s; 1514090465 s elapsed\n", - "Validation\n", - "Epoch 1, 11/ 10; acc: 19.58; ppl: 88.09; 0 src tok/s; 7371 tgt tok/s; 1514090474 s elapsed\n" - ] - } - ], - "source": [ - "trainer = onmt.Trainer(model, loss, loss, optim)\n", - "\n", - "def report_func(*args):\n", - " stats = args[-1]\n", - " stats.output(args[0], args[1], 10, 0)\n", - " return stats\n", - "\n", - "for epoch in range(2):\n", - " trainer.train(epoch, report_func)\n", - " val_stats = trainer.validate()\n", - "\n", - " print(\"Validation\")\n", - " val_stats.output(epoch, 11, 10, 0)\n", - " trainer.epoch_step(val_stats.ppl(), epoch)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To use the model, we need to load up the translation functions " - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import onmt.translate" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "PRED SCORE: -4.0690\n", - "\n", - "SENT 0: ('The', 'competitors', 'have', 'other', 'advantages', ',', 'too', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.2736\n", - "\n", - "SENT 0: ('The', 'company', ''s', 'durability', 
'goes', 'back', 'to', 'its', 'first', 'boss', ',', 'a', 'visionary', ',', 'Thomas', 'J.', 'Watson', 'Sr.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.0144\n", - "\n", - "SENT 0: ('"', 'From', 'what', 'we', 'know', 'today', ',', 'you', 'have', 'to', 'ask', 'how', 'I', 'could', 'be', 'so', 'wrong', '.', '"')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.1361\n", - "\n", - "SENT 0: ('Boeing', 'Co', 'shares', 'rose', '1.5%', 'to', '$', '67.94', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.1382\n", - "\n", - "SENT 0: ('Some', 'did', 'not', 'believe', 'him', ',', 'they', 'said', 'that', 'he', 'got', 'dizzy', 'even', 'in', 'the', 'truck', ',', 'but', 'always', 'wanted', 'to', 'fulfill', 'his', 'dream', ',', 'that', 'of', 'becoming', 'a', 'pilot', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -3.8881\n", - "\n", - "SENT 0: ('In', 'your', 'opinion', ',', 'the', 'council', 'should', 'ensure', 'that', 'the', 'band', 'immediately', 'above', 'the', 'Ronda', 'de', 'Dalt', 'should', 'provide', 'in', 'its', 'entirety', ',', 'an', 'area', 'of', 'equipment', 'to', 'conduct', 'a', 'smooth', 'transition', 'between', 'the', 'city', 'and', 'the', 'green', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.0778\n", - "\n", - "SENT 0: ('The', 'clerk', 'of', 'the', 'court', ',', 'Jorge', 'Yanez', ',', 'went', 'to', 'the', 'jail', 'of', 'the', 'municipality', 'of', 'San', 'Nicolas', 'of', 'Garza', 'to', 'notify', 'Jonah', 'that', 'he', 'has', 'been', 'legally', 'pardoned', 'and', 'his', 'record', 'will', 'be', 'filed', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.2479\n", - "\n", - "SENT 0: ('"', 'In', 'a', 'research', 'it', 'is', 'reported', 'that', 'there', 'are', 'no', 'parts', 'or', 'components', 'of', 'the', 'ship', 'in', 'another', 'place', ',', 'the', 'impact', 'is', 'presented', 'in', 'a', 'structural', 'way', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -3.8585\n", - "\n", - "SENT 0: ('On', 'the', 'asphalt', 'covering', ',', 'he', 'added', ',', 'is', 'placed', 'a', 'final', 'layer', 'called', 'rolling', 'covering', ',', 'which', 'is', 'made', '\\u200b', '\\u200b', 'of', 'a', 'fine', 'stone', 'material', ',', 'meaning', 'sand', 'also', 'dipped', 'into', 'the', 'asphalt', '.')\n", - "PRED 0: .\n", - "\n", - "PRED SCORE: -4.2298\n", - "\n", - "SENT 0: ('This', 'is', '200', 'bar', 'on', 'leaving', 'and', '100', 'bar', 'on', 'arrival', '.')\n", - "PRED 0: .\n", - "\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.5/dist-packages/torch/tensor.py:297: UserWarning: other is not broadcastable to self, but they have the same number of elements. 
Falling back to deprecated pointwise behavior.\n", - " return self.add_(other)\n" - ] - } - ], - "source": [ - "translator = onmt.translate.Translator(beam_size=10, fields=data.fields, model=model)\n", - "builder = onmt.translate.TranslationBuilder(data=valid_data, fields=data.fields)\n", - "\n", - "valid_data.src_vocabs\n", - "for batch in valid_iter:\n", - " trans_batch = translator.translate_batch(batch=batch, data=valid_data)\n", - " translations = builder.from_batch(trans_batch)\n", - " for trans in translations:\n", - " print(trans.log(0))\n", - " break" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.2" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/docs/source/Library.md b/docs/source/Library.md deleted file mode 100644 index eaa79b38b1..0000000000 --- a/docs/source/Library.md +++ /dev/null @@ -1,228 +0,0 @@ -# Library - -For this example, we will assume that we have run preprocess to -create our datasets. For instance - -> onmt_preprocess -train_src data/src-train.txt -train_tgt data/tgt-train.txt -valid_src data/src-val.txt -valid_tgt data/tgt-val.txt -save_data data/data -src_vocab_size 10000 -tgt_vocab_size 10000 - - - -```python -import torch -import torch.nn as nn - -import onmt -import onmt.inputters -import onmt.modules -import onmt.utils -``` - -We begin by loading in the vocabulary for the model of interest. This will let us check vocab size and to get the special ids for padding. - - -```python -vocab_fields = torch.load("data/data.vocab.pt") - -src_text_field = vocab_fields["src"].base_field -src_vocab = src_text_field.vocab -src_padding = src_vocab.stoi[src_text_field.pad_token] - -tgt_text_field = vocab_fields['tgt'].base_field -tgt_vocab = tgt_text_field.vocab -tgt_padding = tgt_vocab.stoi[tgt_text_field.pad_token] -``` - -Next we specify the core model itself. Here we will build a small model with an encoder and an attention based input feeding decoder. Both models will be RNNs and the encoder will be bidirectional - - -```python -emb_size = 100 -rnn_size = 500 -# Specify the core model. - -encoder_embeddings = onmt.modules.Embeddings(emb_size, len(src_vocab), - word_padding_idx=src_padding) - -encoder = onmt.encoders.RNNEncoder(hidden_size=rnn_size, num_layers=1, - rnn_type="LSTM", bidirectional=True, - embeddings=encoder_embeddings) - -decoder_embeddings = onmt.modules.Embeddings(emb_size, len(tgt_vocab), - word_padding_idx=tgt_padding) -decoder = onmt.decoders.decoder.InputFeedRNNDecoder( - hidden_size=rnn_size, num_layers=1, bidirectional_encoder=True, - rnn_type="LSTM", embeddings=decoder_embeddings) - -device = "cuda" if torch.cuda.is_available() else "cpu" -model = onmt.models.model.NMTModel(encoder, decoder) -model.to(device) - -# Specify the tgt word generator and loss computation module -model.generator = nn.Sequential( - nn.Linear(rnn_size, len(tgt_vocab)), - nn.LogSoftmax(dim=-1)).to(device) - -loss = onmt.utils.loss.NMTLossCompute( - criterion=nn.NLLLoss(ignore_index=tgt_padding, reduction="sum"), - generator=model.generator) -``` - -Now we set up the optimizer. Our wrapper around a core torch optim class handles learning rate updates and gradient normalization automatically. 
- - -```python -lr = 1 -torch_optimizer = torch.optim.SGD(model.parameters(), lr=lr) -optim = onmt.utils.optimizers.Optimizer( - torch_optimizer, learning_rate=lr, max_grad_norm=2) -``` - -Now we load the data from disk with the associated vocab fields. To iterate through the data itself we use a wrapper around a torchtext iterator class. We specify one for both the training and test data. - - -```python -# Load some data -from itertools import chain -train_data_file = "data/data.train.0.pt" -valid_data_file = "data/data.valid.0.pt" -train_iter = onmt.inputters.inputter.DatasetLazyIter(dataset_paths=[train_data_file], - fields=vocab_fields, - batch_size=50, - batch_size_multiple=1, - batch_size_fn=None, - device=device, - is_train=True, - repeat=True) - -valid_iter = onmt.inputters.inputter.DatasetLazyIter(dataset_paths=[valid_data_file], - fields=vocab_fields, - batch_size=10, - batch_size_multiple=1, - batch_size_fn=None, - device=device, - is_train=False, - repeat=False) -``` - -Finally we train. Keeping track of the output requires a report manager. - - -```python -report_manager = onmt.utils.ReportMgr( - report_every=50, start_time=None, tensorboard_writer=None) -trainer = onmt.Trainer(model=model, - train_loss=loss, - valid_loss=loss, - optim=optim, - report_manager=report_manager) -trainer.train(train_iter=train_iter, - train_steps=400, - valid_iter=valid_iter, - valid_steps=200) -``` - -``` -[2019-02-15 16:34:17,475 INFO] Start training loop and validate every 200 steps... -[2019-02-15 16:34:17,601 INFO] Loading dataset from data/data.train.0.pt, number of examples: 10000 -[2019-02-15 16:35:43,873 INFO] Step 50/ 400; acc: 11.54; ppl: 1714.07; xent: 7.45; lr: 1.00000; 662/656 tok/s; 86 sec -[2019-02-15 16:37:05,965 INFO] Step 100/ 400; acc: 13.75; ppl: 534.80; xent: 6.28; lr: 1.00000; 675/671 tok/s; 168 sec -[2019-02-15 16:38:31,289 INFO] Step 150/ 400; acc: 15.02; ppl: 439.96; xent: 6.09; lr: 1.00000; 675/668 tok/s; 254 sec -[2019-02-15 16:39:56,715 INFO] Step 200/ 400; acc: 16.08; ppl: 357.62; xent: 5.88; lr: 1.00000; 642/647 tok/s; 339 sec -[2019-02-15 16:39:56,811 INFO] Loading dataset from data/data.valid.0.pt, number of examples: 3000 -[2019-02-15 16:41:13,415 INFO] Validation perplexity: 208.73 -[2019-02-15 16:41:13,415 INFO] Validation accuracy: 23.3507 -[2019-02-15 16:41:13,567 INFO] Loading dataset from data/data.train.0.pt, number of examples: 10000 -[2019-02-15 16:42:41,562 INFO] Step 250/ 400; acc: 17.07; ppl: 310.41; xent: 5.74; lr: 1.00000; 347/344 tok/s; 504 sec -[2019-02-15 16:44:04,899 INFO] Step 300/ 400; acc: 19.17; ppl: 262.81; xent: 5.57; lr: 1.00000; 665/661 tok/s; 587 sec -[2019-02-15 16:45:33,653 INFO] Step 350/ 400; acc: 19.38; ppl: 244.81; xent: 5.50; lr: 1.00000; 649/642 tok/s; 676 sec -[2019-02-15 16:47:06,141 INFO] Step 400/ 400; acc: 20.44; ppl: 214.75; xent: 5.37; lr: 1.00000; 593/598 tok/s; 769 sec -[2019-02-15 16:47:06,265 INFO] Loading dataset from data/data.valid.0.pt, number of examples: 3000 -[2019-02-15 16:48:27,328 INFO] Validation perplexity: 150.277 -[2019-02-15 16:48:27,328 INFO] Validation accuracy: 24.2132 -``` - -To use the model, we need to load up the translation functions. A Translator object requires the vocab fields, readers for source and target and a global scorer. 
- - -```python -import onmt.translate - -src_reader = onmt.inputters.str2reader["text"] -tgt_reader = onmt.inputters.str2reader["text"] -scorer = onmt.translate.GNMTGlobalScorer(alpha=0.7, - beta=0., - length_penalty="avg", - coverage_penalty="none") -gpu = 0 if torch.cuda.is_available() else -1 -translator = onmt.translate.Translator(model=model, - fields=vocab_fields, - src_reader=src_reader, - tgt_reader=tgt_reader, - global_scorer=scorer, - gpu=gpu) -builder = onmt.translate.TranslationBuilder(data=torch.load(valid_data_file), - fields=vocab_fields) - - -for batch in valid_iter: - trans_batch = translator.translate_batch( - batch=batch, src_vocabs=[src_vocab], - attn_debug=False) - translations = builder.from_batch(trans_batch) - for trans in translations: - print(trans.log(0)) -``` -``` -[2019-02-15 16:48:27,419 INFO] Loading dataset from data/data.valid.0.pt, number of examples: 3000 - - -SENT 0: ['Parliament', 'Does', 'Not', 'Support', 'Amendment', 'Freeing', 'Tymoshenko'] -PRED 0: ist ein . -PRED SCORE: -1.0983 - - -SENT 0: ['Today', ',', 'the', 'Ukraine', 'parliament', 'dismissed', ',', 'within', 'the', 'Code', 'of', 'Criminal', 'Procedure', 'amendment', ',', 'the', 'motion', 'to', 'revoke', 'an', 'article', 'based', 'on', 'which', 'the', 'opposition', 'leader', ',', 'Yulia', 'Tymoshenko', ',', 'was', 'sentenced', '.'] -PRED 0: ist das . -PRED SCORE: -1.5950 - - -SENT 0: ['The', 'amendment', 'that', 'would', 'lead', 'to', 'freeing', 'the', 'imprisoned', 'former', 'Prime', 'Minister', 'was', 'revoked', 'during', 'second', 'reading', 'of', 'the', 'proposal', 'for', 'mitigation', 'of', 'sentences', 'for', 'economic', 'offences', '.'] -PRED 0: Es gibt es das der für . -PRED SCORE: -1.5128 - - -SENT 0: ['In', 'October', ',', 'Tymoshenko', 'was', 'sentenced', 'to', 'seven', 'years', 'in', 'prison', 'for', 'entering', 'into', 'what', 'was', 'reported', 'to', 'be', 'a', 'disadvantageous', 'gas', 'deal', 'with', 'Russia', '.'] -PRED 0: ist ein . -PRED SCORE: -1.5578 - - -SENT 0: ['The', 'verdict', 'is', 'not', 'yet', 'final;', 'the', 'court', 'will', 'hear', 'Tymoshenko', ''s', 'appeal', 'in', 'December', '.'] -PRED 0: ist nicht . -PRED SCORE: -0.9623 - - -SENT 0: ['Tymoshenko', 'claims', 'the', 'verdict', 'is', 'a', 'political', 'revenge', 'of', 'the', 'regime;', 'in', 'the', 'West', ',', 'the', 'trial', 'has', 'also', 'evoked', 'suspicion', 'of', 'being', 'biased', '.'] -PRED 0: ist ein . -PRED SCORE: -0.8703 - - -SENT 0: ['The', 'proposal', 'to', 'remove', 'Article', '365', 'from', 'the', 'Code', 'of', 'Criminal', 'Procedure', ',', 'upon', 'which', 'the', 'former', 'Prime', 'Minister', 'was', 'sentenced', ',', 'was', 'supported', 'by', '147', 'members', 'of', 'parliament', '.'] -PRED 0: Sie sich mit . -PRED SCORE: -1.4778 - - -SENT 0: ['Its', 'ratification', 'would', 'require', '226', 'votes', '.'] -PRED 0: Sie sich . -PRED SCORE: -1.3341 - - -SENT 0: ['Libya', ''s', 'Victory'] -PRED 0: Sie die . -PRED SCORE: -1.5192 - - -SENT 0: ['The', 'story', 'of', 'Libya', ''s', 'liberation', ',', 'or', 'rebellion', ',', 'already', 'has', 'its', 'defeated', '.'] -PRED 0: ist ein . -PRED SCORE: -1.2772 - -... diff --git a/docs/source/examples.rst b/docs/source/examples.rst deleted file mode 100644 index 9ef96b8576..0000000000 --- a/docs/source/examples.rst +++ /dev/null @@ -1,5 +0,0 @@ -== Examples == - - -.. include:: quickstart.md -.. 
include:: extended.md diff --git a/docs/source/ggnn.md b/docs/source/examples/GGNN.md similarity index 63% rename from docs/source/ggnn.md rename to docs/source/examples/GGNN.md index 8d17fdb519..8235d7cc1a 100644 --- a/docs/source/ggnn.md +++ b/docs/source/examples/GGNN.md @@ -4,11 +4,11 @@ Graph-to-sequence networks allow information represtable as a graph (such as an The training option `-encoder_type ggnn` implements a GGNN (Gated Graph Neural Network) based on github.com/JamesChuanggg/ggnn.pytorch.git which is based on the paper "Gated Graph Sequence Neural Networks" by Y. Li, D. Tarlow, M. Brockschmidt, and R. Zemel. -The ggnn encoder is used for program equivalence proof generation in the paper Equivalence of Dataflow Graphs via Rewrite Rules Using a Graph-to-Sequence Neural Model. That paper shows the benefit of the graph-to-sequence model over a sequence-to-sequence model for this problem which can be well represented with graphical input. The integration of the ggnn network into the OpenNMT-py system supports attention on the nodes as well as a copy mechanism. +The ggnn encoder is used for program equivalence proof generation in the paper [Equivalence of Dataflow Graphs via Rewrite Rules Using a Graph-to-Sequence Neural Model](https://arxiv.org/abs/2002.06799). That paper shows the benefit of the graph-to-sequence model over a sequence-to-sequence model for this problem which can be well represented with graphical input. The integration of the ggnn network into the OpenNMT-py system supports attention on the nodes as well as a copy mechanism. ### Dependencies -* There are no additional dependencies beyond the rnn-to-rnn sequeence2sequence requirements. +* There are no additional dependencies beyond the rnn-to-rnn sequence2sequence requirements. ### Quick Start @@ -24,22 +24,72 @@ cd OpenNMT-py ``` -1) Preprocess the data. - -``` -python preprocess.py -train_src $data_path/src-train.txt -train_tgt $data_path/tgt-train.txt -valid_src $data_path/src-val.txt -valid_tgt $data_path/tgt-val.txt -src_seq_length 1000 -tgt_seq_length 30 -src_vocab $data_path/srcvocab.txt -tgt_vocab $data_path/tgtvocab.txt -dynamic_dict -save_data $data_path/final 2>&1 > $data_path/preprocess.out +The YAML configuration for this example is the following: + +```yaml +# ggnn_example.yaml +## Where the necessary objects will be written +save_data: /OpenNMT-py-ggnn-example/run/example + +# Filter long examples +src_seq_length: 1000 +tgt_seq_length: 30 + +# Data definition +data: + cnndm: + path_src: /OpenNMT-py-ggnn-example/src-train.txt + path_tgt: /OpenNMT-py-ggnn-example/tgt-train.txt + transforms: [filtertoolong] + weight: 1 + valid: + path_src: /OpenNMT-py-ggnn-example/src-val.txt + path_tgt: /OpenNMT-py-ggnn-example/tgt-val.txt + +src_vocab: /OpenNMT-py-ggnn-example/srcvocab.txt +tgt_vocab: /OpenNMT-py-ggnn-example/tgtvocab.txt + +save_model: /OpenNMT-py-ggnn-example/run/model + +# Model options +train_steps: 10000 +save_checkpoint_steps: 5000 +encoder_type: ggnn +layers: 2 +decoder_type: rnn +rnn_size: 256 +learning_rate: 0.1 +start_decay_steps: 5000 +learning_rate_decay: 0.8 +global_attention: general +batch_size: 32 +word_vec_size: 256 +bridge: true +gpu_ranks: 0 +n_edge_types: 9 +state_dim: 256 +n_steps: 10 +n_node: 64 ``` 2) Train the model. 
+You can simply run the following command:
+
```
-python train.py -data $data_path/final -encoder_type ggnn -layers 2 -decoder_type rnn -rnn_size 256 -learning_rate 0.1 -start_decay_steps 5000 -learning_rate_decay 0.8 -global_attention general -batch_size 32 -word_vec_size 256 -bridge -train_steps 10000 -gpu_ranks 0 -save_checkpoint_steps 5000 -save_model $data_path/final-model -src_vocab $data_path/srcvocab.txt -n_edge_types 9 -state_dim 256 -n_steps 10 -n_node 64 > $data_path/train.final.out
+python train.py -config ggnn_example.yaml
```
3) Translate the graph of 2 equivalent linear algebra expressions into the axiom list which proves them equivalent.
```
-python translate.py -model $data_path/final-model_step_10000.pt -src $data_path/src-test.txt -beam_size 5 -n_best 5 -gpu 0 -output $data_path/pred-test_beam5.txt -dynamic_dict 2>&1 > $data_path/translate5.out
+python translate.py \
+    -model /OpenNMT-py-ggnn-example/run/model_step_10000.pt \
+    -src /OpenNMT-py-ggnn-example/src-test.txt \
+    -beam_size 5 -n_best 5 \
+    -gpu 0 \
+    -output /OpenNMT-py-ggnn-example/pred-test_beam5.txt \
+    2>&1 > /OpenNMT-py-ggnn-example/translate5.out
```
@@ -91,4 +141,4 @@ identifiers in the edge list.
### Acknowledgement
-This gated graph neural network is leveraged from github.com/JamesChuanggg/ggnn.pytorch.git which is based on the paper "Gated Graph Sequence Neural Networks" by Y. Li, D. Tarlow, M. Brockschmidt, and R. Zemel.
+This gated graph neural network is leveraged from https://github.com/JamesChuanggg/ggnn.pytorch.git which is based on the paper [Gated Graph Sequence Neural Networks](https://arxiv.org/abs/1511.05493) by Y. Li, D. Tarlow, M. Brockschmidt, and R. Zemel.
diff --git a/docs/source/examples/Library.ipynb b/docs/source/examples/Library.ipynb
new file mode 100644
index 0000000000..941c20cc8d
--- /dev/null
+++ b/docs/source/examples/Library.ipynb
@@ -0,0 +1,886 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# How to use OpenNMT-py as a Library"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The example notebook (available [here](https://github.com/OpenNMT/OpenNMT-py/blob/master/docs/source/examples/Library.ipynb)) should be able to run as a standalone execution, provided `onmt` is in the path (installed via `pip` for instance).\n",
+    "\n",
+    "Some parts may not be 100% 'library-friendly' but it's mostly workable."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Import a few modules and functions that will be necessary"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import yaml\n",
+    "import torch\n",
+    "import torch.nn as nn\n",
+    "from argparse import Namespace\n",
+    "from collections import defaultdict, Counter"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import onmt\n",
+    "from onmt.inputters.inputter import _load_vocab, _build_fields_vocab, get_fields, IterOnDevice\n",
+    "from onmt.inputters.corpus import ParallelCorpus\n",
+    "from onmt.inputters.dynamic_iterator import DynamicDatasetIter\n",
+    "from onmt.translate import GNMTGlobalScorer, Translator, TranslationBuilder\n",
+    "from onmt.utils.misc import set_random_seed"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Enable logging"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<RootLogger root (INFO)>"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# enable logging\n",
+    "from onmt.utils.logging import init_logger, logger\n",
+    "init_logger()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Set random seed"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "is_cuda = torch.cuda.is_available()\n",
+    "set_random_seed(1111, is_cuda)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Retrieve data"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To make a proper example, we will need some data, as well as some vocabulary(ies).\n",
+    "\n",
+    "Let's take the same data as in the [quickstart](https://opennmt.net/OpenNMT-py/quickstart.html):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "--2020-09-25 15:28:05--  https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz\n",
+      "Resolving s3.amazonaws.com (s3.amazonaws.com)... 52.217.18.38\n",
+      "Connecting to s3.amazonaws.com (s3.amazonaws.com)|52.217.18.38|:443... connected.\n",
+      "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 1662081 (1,6M) [application/x-gzip]\n", + "Saving to: ‘toy-ende.tar.gz.5’\n", + "\n", + "toy-ende.tar.gz.5 100%[===================>] 1,58M 2,33MB/s in 0,7s \n", + "\n", + "2020-09-25 15:28:07 (2,33 MB/s) - ‘toy-ende.tar.gz.5’ saved [1662081/1662081]\n", + "\n" + ] + } + ], + "source": [ + "!wget https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "!tar xf toy-ende.tar.gz" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "config.yaml src-test.txt src-val.txt tgt-train.txt\r\n", + "\u001b[0m\u001b[01;34mrun\u001b[0m/ src-train.txt tgt-test.txt tgt-val.txt\r\n" + ] + } + ], + "source": [ + "ls toy-ende" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Prepare data and vocab" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As for any use case of OpenNMT-py 2.0, we can start by creating a simple YAML configuration with our datasets. This is the easiest way to build the proper `opts` `Namespace` that will be used to create the vocabulary(ies)." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "yaml_config = \"\"\"\n", + "## Where the vocab(s) will be written\n", + "save_data: toy-ende/run/example\n", + "# Corpus opts:\n", + "data:\n", + " corpus:\n", + " path_src: toy-ende/src-train.txt\n", + " path_tgt: toy-ende/tgt-train.txt\n", + " transforms: []\n", + " weight: 1\n", + " valid:\n", + " path_src: toy-ende/src-val.txt\n", + " path_tgt: toy-ende/tgt-val.txt\n", + " transforms: []\n", + "\"\"\"\n", + "config = yaml.safe_load(yaml_config)\n", + "with open(\"toy-ende/config.yaml\", \"w\") as f:\n", + " f.write(yaml_config)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from onmt.utils.parse import ArgumentParser\n", + "parser = ArgumentParser(description='build_vocab.py')" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "from onmt.opts import dynamic_prepare_opts\n", + "dynamic_prepare_opts(parser, build_vocab_only=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "base_args = ([\"-config\", \"toy-ende/config.yaml\", \"-n_sample\", \"10000\"])\n", + "opts, unknown = parser.parse_known_args(base_args)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Namespace(config='toy-ende/config.yaml', data=\"{'corpus': {'path_src': 'toy-ende/src-train.txt', 'path_tgt': 'toy-ende/tgt-train.txt', 'transforms': [], 'weight': 1}, 'valid': {'path_src': 'toy-ende/src-val.txt', 'path_tgt': 'toy-ende/tgt-val.txt', 'transforms': []}}\", insert_ratio=0.0, mask_length='subword', mask_ratio=0.0, n_sample=10000, onmttok_kwargs=\"{'mode': 'none'}\", overwrite=False, permute_sent_ratio=0.0, poisson_lambda=0.0, random_ratio=0.0, replace_length=-1, rotate_ratio=0.5, save_config=None, save_data='toy-ende/run/example', seed=-1, share_vocab=False, skip_empty_level='warning', src_seq_length=200, src_subword_model=None, src_subword_type='none', src_vocab=None, subword_alpha=0, subword_nbest=1, switchout_temperature=1.0, tgt_seq_length=200, tgt_subword_model=None, tgt_subword_type='none', 
tgt_vocab=None, tokendrop_temperature=1.0, tokenmask_temperature=1.0, transforms=[])" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "opts" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2020-09-25 15:28:08,068 INFO] Parsed 2 corpora from -data.\n", + "[2020-09-25 15:28:08,069 INFO] Counter vocab from 10000 samples.\n", + "[2020-09-25 15:28:08,070 INFO] Save 10000 transformed example/corpus.\n", + "[2020-09-25 15:28:08,070 INFO] corpus's transforms: TransformPipe()\n", + "[2020-09-25 15:28:08,101 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:28:08,320 INFO] Just finished the first loop\n", + "[2020-09-25 15:28:08,320 INFO] Counters src:24995\n", + "[2020-09-25 15:28:08,321 INFO] Counters tgt:35816\n" + ] + } + ], + "source": [ + "from onmt.bin.build_vocab import build_vocab_main\n", + "build_vocab_main(opts)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "example.vocab.src example.vocab.tgt \u001b[0m\u001b[01;34msample\u001b[0m/\r\n" + ] + } + ], + "source": [ + "ls toy-ende/run" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We just created our source and target vocabularies, respectively `toy-ende/run/example.vocab.src` and `toy-ende/run/example.vocab.tgt`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Build fields" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can build the fields from the text files that were just created." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "src_vocab_path = \"toy-ende/run/example.vocab.src\"\n", + "tgt_vocab_path = \"toy-ende/run/example.vocab.tgt\"" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2020-09-25 15:28:08,495 INFO] Loading src vocabulary from toy-ende/run/example.vocab.src\n", + "[2020-09-25 15:28:08,554 INFO] Loaded src vocab has 24995 tokens.\n", + "[2020-09-25 15:28:08,562 INFO] Loading tgt vocabulary from toy-ende/run/example.vocab.tgt\n", + "[2020-09-25 15:28:08,617 INFO] Loaded tgt vocab has 35816 tokens.\n" + ] + } + ], + "source": [ + "# initialize the frequency counter\n", + "counters = defaultdict(Counter)\n", + "# load source vocab\n", + "_src_vocab, _src_vocab_size = _load_vocab(\n", + " src_vocab_path,\n", + " 'src',\n", + " counters)\n", + "# load target vocab\n", + "_tgt_vocab, _tgt_vocab_size = _load_vocab(\n", + " tgt_vocab_path,\n", + " 'tgt',\n", + " counters)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "# initialize fields\n", + "src_nfeats, tgt_nfeats = 0, 0 # do not support word features for now\n", + "fields = get_fields(\n", + " 'text', src_nfeats, tgt_nfeats)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'src': ,\n", + " 'tgt': ,\n", + " 'indices': }" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fields" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2020-09-25 15:28:08,699 INFO] * tgt vocab size: 30004.\n", + "[2020-09-25 15:28:08,749 INFO] * src vocab size: 24997.\n" + ] + } + ], + "source": [ + "# build fields vocab\n", + "share_vocab = False\n", + "vocab_size_multiple = 1\n", + "src_vocab_size = 30000\n", + "tgt_vocab_size = 30000\n", + "src_words_min_frequency = 1\n", + "tgt_words_min_frequency = 1\n", + "vocab_fields = _build_fields_vocab(\n", + " fields, counters, 'text', share_vocab,\n", + " vocab_size_multiple,\n", + " src_vocab_size, src_words_min_frequency,\n", + " tgt_vocab_size, tgt_words_min_frequency)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "An alternative way of creating these fields is to run `onmt_train` without actually training, to just output the necessary files." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Prepare for training: model and optimizer creation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's get a few fields/vocab related variables to simplify the model creation a bit:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "src_text_field = vocab_fields[\"src\"].base_field\n", + "src_vocab = src_text_field.vocab\n", + "src_padding = src_vocab.stoi[src_text_field.pad_token]\n", + "\n", + "tgt_text_field = vocab_fields['tgt'].base_field\n", + "tgt_vocab = tgt_text_field.vocab\n", + "tgt_padding = tgt_vocab.stoi[tgt_text_field.pad_token]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next we specify the core model itself. Here we will build a small model with an encoder and an attention based input feeding decoder. 
Both models will be RNNs and the encoder will be bidirectional" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "emb_size = 100\n", + "rnn_size = 500\n", + "# Specify the core model.\n", + "\n", + "encoder_embeddings = onmt.modules.Embeddings(emb_size, len(src_vocab),\n", + " word_padding_idx=src_padding)\n", + "\n", + "encoder = onmt.encoders.RNNEncoder(hidden_size=rnn_size, num_layers=1,\n", + " rnn_type=\"LSTM\", bidirectional=True,\n", + " embeddings=encoder_embeddings)\n", + "\n", + "decoder_embeddings = onmt.modules.Embeddings(emb_size, len(tgt_vocab),\n", + " word_padding_idx=tgt_padding)\n", + "decoder = onmt.decoders.decoder.InputFeedRNNDecoder(\n", + " hidden_size=rnn_size, num_layers=1, bidirectional_encoder=True, \n", + " rnn_type=\"LSTM\", embeddings=decoder_embeddings)\n", + "\n", + "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n", + "model = onmt.models.model.NMTModel(encoder, decoder)\n", + "model.to(device)\n", + "\n", + "# Specify the tgt word generator and loss computation module\n", + "model.generator = nn.Sequential(\n", + " nn.Linear(rnn_size, len(tgt_vocab)),\n", + " nn.LogSoftmax(dim=-1)).to(device)\n", + "\n", + "loss = onmt.utils.loss.NMTLossCompute(\n", + " criterion=nn.NLLLoss(ignore_index=tgt_padding, reduction=\"sum\"),\n", + " generator=model.generator)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we set up the optimizer. This could be a core torch optim class, or our wrapper which handles learning rate updates and gradient normalization automatically." + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "lr = 1\n", + "torch_optimizer = torch.optim.SGD(model.parameters(), lr=lr)\n", + "optim = onmt.utils.optimizers.Optimizer(\n", + " torch_optimizer, learning_rate=lr, max_grad_norm=2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create the training and validation data iterators" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we need to create the dynamic dataset iterator.\n", + "\n", + "This is not very 'library-friendly' for now because of the way the `DynamicDatasetIter` constructor is defined. It may evolve in the future." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "src_train = \"toy-ende/src-train.txt\"\n", + "tgt_train = \"toy-ende/tgt-train.txt\"\n", + "src_val = \"toy-ende/src-val.txt\"\n", + "tgt_val = \"toy-ende/tgt-val.txt\"\n", + "\n", + "# build the ParallelCorpus\n", + "corpus = ParallelCorpus(\"corpus\", src_train, tgt_train)\n", + "valid = ParallelCorpus(\"valid\", src_val, tgt_val)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "# build the training iterator\n", + "train_iter = DynamicDatasetIter(\n", + " corpora={\"corpus\": corpus},\n", + " corpora_info={\"corpus\": {\"weight\": 1}},\n", + " transforms={},\n", + " fields=vocab_fields,\n", + " is_train=True,\n", + " batch_type=\"tokens\",\n", + " batch_size=4096,\n", + " batch_size_multiple=1,\n", + " data_type=\"text\")" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "# make sure the iteration happens on GPU 0 (-1 for CPU, N for GPU N)\n", + "train_iter = iter(IterOnDevice(train_iter, 0))" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "# build the validation iterator\n", + "valid_iter = DynamicDatasetIter(\n", + " corpora={\"valid\": valid},\n", + " corpora_info={\"valid\": {\"weight\": 1}},\n", + " transforms={},\n", + " fields=vocab_fields,\n", + " is_train=False,\n", + " batch_type=\"sents\",\n", + " batch_size=8,\n", + " batch_size_multiple=1,\n", + " data_type=\"text\")" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [], + "source": [ + "valid_iter = IterOnDevice(valid_iter, 0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Training" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally we train." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2020-09-25 15:28:15,184 INFO] Start training loop and validate every 500 steps...\n", + "[2020-09-25 15:28:15,185 INFO] corpus's transforms: TransformPipe()\n", + "[2020-09-25 15:28:15,187 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:28:21,140 INFO] Step 50/ 1000; acc: 7.52; ppl: 8832.29; xent: 9.09; lr: 1.00000; 18916/18871 tok/s; 6 sec\n", + "[2020-09-25 15:28:24,869 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:28:27,121 INFO] Step 100/ 1000; acc: 9.34; ppl: 1840.06; xent: 7.52; lr: 1.00000; 18911/18785 tok/s; 12 sec\n", + "[2020-09-25 15:28:33,048 INFO] Step 150/ 1000; acc: 10.35; ppl: 1419.18; xent: 7.26; lr: 1.00000; 19062/19017 tok/s; 18 sec\n", + "[2020-09-25 15:28:37,019 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:28:39,022 INFO] Step 200/ 1000; acc: 11.14; ppl: 1127.44; xent: 7.03; lr: 1.00000; 19084/18911 tok/s; 24 sec\n", + "[2020-09-25 15:28:45,073 INFO] Step 250/ 1000; acc: 12.46; ppl: 912.13; xent: 6.82; lr: 1.00000; 18575/18570 tok/s; 30 sec\n", + "[2020-09-25 15:28:49,301 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:28:51,151 INFO] Step 300/ 1000; acc: 13.04; ppl: 779.50; xent: 6.66; lr: 1.00000; 18394/18307 tok/s; 36 sec\n", + "[2020-09-25 15:28:57,316 INFO] Step 350/ 1000; acc: 14.04; ppl: 685.48; xent: 6.53; lr: 1.00000; 18339/18173 tok/s; 42 sec\n", + "[2020-09-25 15:29:02,117 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:29:03,576 INFO] Step 400/ 1000; acc: 14.99; ppl: 590.20; xent: 6.38; lr: 1.00000; 18090/18029 tok/s; 48 sec\n", + "[2020-09-25 15:29:09,546 INFO] Step 450/ 1000; acc: 16.00; ppl: 524.51; xent: 6.26; lr: 1.00000; 18726/18536 tok/s; 54 sec\n", + "[2020-09-25 15:29:14,585 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:29:15,596 INFO] Step 500/ 1000; acc: 16.78; ppl: 453.38; xent: 6.12; lr: 1.00000; 17877/17980 tok/s; 60 sec\n", + "[2020-09-25 15:29:15,597 INFO] valid's transforms: TransformPipe()\n", + "[2020-09-25 15:29:15,599 INFO] Loading ParallelCorpus(toy-ende/src-val.txt, toy-ende/tgt-val.txt, align=None)...\n", + "[2020-09-25 15:29:24,528 INFO] Validation perplexity: 295.1\n", + "[2020-09-25 15:29:24,529 INFO] Validation accuracy: 17.6533\n", + "[2020-09-25 15:29:30,592 INFO] Step 550/ 1000; acc: 17.47; ppl: 421.26; xent: 6.04; lr: 1.00000; 7726/7610 tok/s; 75 sec\n", + "[2020-09-25 15:29:36,055 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:29:36,695 INFO] Step 600/ 1000; acc: 18.95; ppl: 354.44; xent: 5.87; lr: 1.00000; 17470/17598 tok/s; 82 sec\n", + "[2020-09-25 15:29:42,794 INFO] Step 650/ 1000; acc: 19.60; ppl: 328.47; xent: 5.79; lr: 1.00000; 18994/18793 tok/s; 88 sec\n", + "[2020-09-25 15:29:48,635 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:29:48,924 INFO] Step 700/ 1000; acc: 20.57; ppl: 285.48; xent: 5.65; lr: 1.00000; 17856/17788 tok/s; 94 sec\n", + "[2020-09-25 15:29:54,898 INFO] Step 750/ 1000; acc: 21.97; ppl: 249.06; xent: 5.52; lr: 
1.00000; 19030/18924 tok/s; 100 sec\n", + "[2020-09-25 15:30:01,233 INFO] Step 800/ 1000; acc: 22.66; ppl: 228.54; xent: 5.43; lr: 1.00000; 17571/17471 tok/s; 106 sec\n", + "[2020-09-25 15:30:01,357 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:30:07,345 INFO] Step 850/ 1000; acc: 24.32; ppl: 193.65; xent: 5.27; lr: 1.00000; 18344/18313 tok/s; 112 sec\n", + "[2020-09-25 15:30:11,363 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:30:13,487 INFO] Step 900/ 1000; acc: 24.93; ppl: 177.65; xent: 5.18; lr: 1.00000; 18293/18105 tok/s; 118 sec\n", + "[2020-09-25 15:30:19,670 INFO] Step 950/ 1000; acc: 26.33; ppl: 157.10; xent: 5.06; lr: 1.00000; 17791/17746 tok/s; 124 sec\n", + "[2020-09-25 15:30:24,072 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...\n", + "[2020-09-25 15:30:25,820 INFO] Step 1000/ 1000; acc: 27.47; ppl: 137.64; xent: 4.92; lr: 1.00000; 17942/17962 tok/s; 131 sec\n", + "[2020-09-25 15:30:25,822 INFO] Loading ParallelCorpus(toy-ende/src-val.txt, toy-ende/tgt-val.txt, align=None)...\n", + "[2020-09-25 15:30:34,665 INFO] Validation perplexity: 241.801\n", + "[2020-09-25 15:30:34,666 INFO] Validation accuracy: 20.2837\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "report_manager = onmt.utils.ReportMgr(\n", + " report_every=50, start_time=None, tensorboard_writer=None)\n", + "\n", + "trainer = onmt.Trainer(model=model,\n", + " train_loss=loss,\n", + " valid_loss=loss,\n", + " optim=optim,\n", + " report_manager=report_manager,\n", + " dropout=[0.1])\n", + "\n", + "trainer.train(train_iter=train_iter,\n", + " train_steps=1000,\n", + " valid_iter=valid_iter,\n", + " valid_steps=500)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Translate" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For translation, we can build a \"traditional\" (as opposed to dynamic) dataset for now." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "src_data = {\"reader\": onmt.inputters.str2reader[\"text\"](), \"data\": src_val}\n", + "tgt_data = {\"reader\": onmt.inputters.str2reader[\"text\"](), \"data\": tgt_val}\n", + "_readers, _data = onmt.inputters.Dataset.config(\n", + " [('src', src_data), ('tgt', tgt_data)])" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "dataset = onmt.inputters.Dataset(\n", + " vocab_fields, readers=_readers, data=_data,\n", + " sort_key=onmt.inputters.str2sortkey[\"text\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "data_iter = onmt.inputters.OrderedIterator(\n", + " dataset=dataset,\n", + " device=\"cuda\",\n", + " batch_size=10,\n", + " train=False,\n", + " sort=False,\n", + " sort_within_batch=True,\n", + " shuffle=False\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "src_reader = onmt.inputters.str2reader[\"text\"]\n", + "tgt_reader = onmt.inputters.str2reader[\"text\"]\n", + "scorer = GNMTGlobalScorer(alpha=0.7, \n", + " beta=0., \n", + " length_penalty=\"avg\", \n", + " coverage_penalty=\"none\")\n", + "gpu = 0 if torch.cuda.is_available() else -1\n", + "translator = Translator(model=model, \n", + " fields=vocab_fields, \n", + " src_reader=src_reader, \n", + " tgt_reader=tgt_reader, \n", + " global_scorer=scorer,\n", + " gpu=gpu)\n", + "builder = onmt.translate.TranslationBuilder(data=dataset, \n", + " fields=vocab_fields)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Note**: translations will be very poor, because of the very low quantity of data, the absence of proper tokenization, and the brevity of the training." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "SENT 0: ['Parliament', 'Does', 'Not', 'Support', 'Amendment', 'Freeing', 'Tymoshenko']\n", + "PRED 0: Parlament das Parlament über die Europäische Parlament , die sich in der Lage in der Lage ist , die es in der Lage sind .\n", + "PRED SCORE: -1.5935\n", + "\n", + "\n", + "SENT 0: ['Today', ',', 'the', 'Ukraine', 'parliament', 'dismissed', ',', 'within', 'the', 'Code', 'of', 'Criminal', 'Procedure', 'amendment', ',', 'the', 'motion', 'to', 'revoke', 'an', 'article', 'based', 'on', 'which', 'the', 'opposition', 'leader', ',', 'Yulia', 'Tymoshenko', ',', 'was', 'sentenced', '.']\n", + "PRED 0: In der Nähe des Hotels , die in der Lage , die sich in der Lage ist , in der Lage , die in der Lage , die in der Lage ist .\n", + "PRED SCORE: -1.7173\n", + "\n", + "\n", + "SENT 0: ['The', 'amendment', 'that', 'would', 'lead', 'to', 'freeing', 'the', 'imprisoned', 'former', 'Prime', 'Minister', 'was', 'revoked', 'during', 'second', 'reading', 'of', 'the', 'proposal', 'for', 'mitigation', 'of', 'sentences', 'for', 'economic', 'offences', '.']\n", + "PRED 0: Die Tatsache , die sich in der Lage in der Lage ist , die für eine Antwort der Entwicklung für die Entwicklung von Präsident .\n", + "PRED SCORE: -1.6834\n", + "\n", + "\n", + "SENT 0: ['In', 'October', ',', 'Tymoshenko', 'was', 'sentenced', 'to', 'seven', 'years', 'in', 'prison', 'for', 'entering', 'into', 'what', 'was', 'reported', 'to', 'be', 'a', 'disadvantageous', 'gas', 'deal', 'with', 'Russia', '.']\n", + "PRED 0: In der Nähe wurde die Menschen in der Lage ist , die sich in der Lage .\n", + "PRED SCORE: -1.5765\n", + "\n", + "\n", + "SENT 0: ['The', 'verdict', 'is', 'not', 'yet', 'final;', 'the', 'court', 'will', 'hear', 'Tymoshenko', ''s', 'appeal', 'in', 'December', '.']\n", + "PRED 0: Es ist nicht der Fall , die in der Lage in der Lage sind .\n", + "PRED SCORE: -1.3287\n", + "\n", + "\n", + "SENT 0: ['Tymoshenko', 'claims', 'the', 'verdict', 'is', 'a', 'political', 'revenge', 'of', 'the', 'regime;', 'in', 'the', 'West', ',', 'the', 'trial', 'has', 'also', 'evoked', 'suspicion', 'of', 'being', 'biased', '.']\n", + "PRED 0: Um in der Lage ist auch eine Lösung Rolle .\n", + "PRED SCORE: -1.3975\n", + "\n", + "\n", + "SENT 0: ['The', 'proposal', 'to', 'remove', 'Article', '365', 'from', 'the', 'Code', 'of', 'Criminal', 'Procedure', ',', 'upon', 'which', 'the', 'former', 'Prime', 'Minister', 'was', 'sentenced', ',', 'was', 'supported', 'by', '147', 'members', 'of', 'parliament', '.']\n", + "PRED 0: Der Vorschlag , die in der Lage , die in der Lage , die in der Lage ist , war er von der Fall wurde .\n", + "PRED SCORE: -1.6062\n", + "\n", + "\n", + "SENT 0: ['Its', 'ratification', 'would', 'require', '226', 'votes', '.']\n", + "PRED 0: Es wäre noch einmal noch einmal .\n", + "PRED SCORE: -1.8001\n", + "\n", + "\n", + "SENT 0: ['Libya', ''s', 'Victory']\n", + "PRED 0: In der Nähe des Hotels befindet sich in der Nähe des Hotels in der Lage .\n", + "PRED SCORE: -1.7097\n", + "\n", + "\n", + "SENT 0: ['The', 'story', 'of', 'Libya', ''s', 'liberation', ',', 'or', 'rebellion', ',', 'already', 'has', 'its', 'defeated', '.']\n", + "PRED 0: In der Nähe des Hotels in der Lage ist in der Lage .\n", + "PRED SCORE: -1.7885\n", + "\n" + ] + } + ], + "source": [ + "for batch in data_iter:\n", + " trans_batch = translator.translate_batch(\n", + " batch=batch, src_vocabs=[src_vocab],\n", + " 
attn_debug=False)\n", + " translations = builder.from_batch(trans_batch)\n", + " for trans in translations:\n", + " print(trans.log(0))\n", + " break" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/source/examples/Library.md b/docs/source/examples/Library.md new file mode 100644 index 0000000000..47c0b922c4 --- /dev/null +++ b/docs/source/examples/Library.md @@ -0,0 +1,552 @@ + +# Library + +The example notebook (available [here](https://github.com/OpenNMT/OpenNMT-py/blob/master/docs/source/examples/Library.ipynb)) should be able to run as a standalone execution, provided `onmt` is in the path (installed via `pip` for instance). + +Some parts may not be 100% 'library-friendly' but it's mostly workable. + +### Import a few modules and functions that will be necessary + + +```python +import yaml +import torch +import torch.nn as nn +from argparse import Namespace +from collections import defaultdict, Counter +``` + + +```python +import onmt +from onmt.inputters.inputter import _load_vocab, _build_fields_vocab, get_fields, IterOnDevice +from onmt.inputters.corpus import ParallelCorpus +from onmt.inputters.dynamic_iterator import DynamicDatasetIter +from onmt.translate import GNMTGlobalScorer, Translator, TranslationBuilder +from onmt.utils.misc import set_random_seed +``` + +### Enable logging + + +```python +# enable logging +from onmt.utils.logging import init_logger, logger +init_logger() +``` + + + + + + + + +### Set random seed + + +```python +is_cuda = torch.cuda.is_available() +set_random_seed(1111, is_cuda) +``` + +### Retrieve data + +To make a proper example, we will need some data, as well as some vocabulary(ies). + +Let's take the same data as in the [quickstart](https://opennmt.net/OpenNMT-py/quickstart.html): + + +```python +!wget https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz +``` + + --2020-09-25 15:28:05-- https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz + Resolving s3.amazonaws.com (s3.amazonaws.com)... 52.217.18.38 + Connecting to s3.amazonaws.com (s3.amazonaws.com)|52.217.18.38|:443... connected. + HTTP request sent, awaiting response... 200 OK + Length: 1662081 (1,6M) [application/x-gzip] + Saving to: ‘toy-ende.tar.gz.5’ + + toy-ende.tar.gz.5 100%[===================>] 1,58M 2,33MB/s in 0,7s + + 2020-09-25 15:28:07 (2,33 MB/s) - ‘toy-ende.tar.gz.5’ saved [1662081/1662081] + + + + +```python +!tar xf toy-ende.tar.gz +``` + + +```python +ls toy-ende +``` + + config.yaml src-test.txt src-val.txt tgt-train.txt + run/ src-train.txt tgt-test.txt tgt-val.txt + + +### Prepare data and vocab + +As for any use case of OpenNMT-py 2.0, we can start by creating a simple YAML configuration with our datasets. This is the easiest way to build the proper `opts` `Namespace` that will be used to create the vocabulary(ies). 
+
+
+```python
+yaml_config = """
+## Where the vocab(s) will be written
+save_data: toy-ende/run/example
+# Corpus opts:
+data:
+    corpus:
+        path_src: toy-ende/src-train.txt
+        path_tgt: toy-ende/tgt-train.txt
+        transforms: []
+        weight: 1
+    valid:
+        path_src: toy-ende/src-val.txt
+        path_tgt: toy-ende/tgt-val.txt
+        transforms: []
+"""
+config = yaml.safe_load(yaml_config)
+with open("toy-ende/config.yaml", "w") as f:
+    f.write(yaml_config)
+```
+
+
+```python
+from onmt.utils.parse import ArgumentParser
+parser = ArgumentParser(description='build_vocab.py')
+```
+
+
+```python
+from onmt.opts import dynamic_prepare_opts
+dynamic_prepare_opts(parser, build_vocab_only=True)
+```
+
+
+```python
+base_args = (["-config", "toy-ende/config.yaml", "-n_sample", "10000"])
+opts, unknown = parser.parse_known_args(base_args)
+```
+
+
+```python
+opts
+```
+
+
+
+
+    Namespace(config='toy-ende/config.yaml', data="{'corpus': {'path_src': 'toy-ende/src-train.txt', 'path_tgt': 'toy-ende/tgt-train.txt', 'transforms': [], 'weight': 1}, 'valid': {'path_src': 'toy-ende/src-val.txt', 'path_tgt': 'toy-ende/tgt-val.txt', 'transforms': []}}", insert_ratio=0.0, mask_length='subword', mask_ratio=0.0, n_sample=10000, onmttok_kwargs="{'mode': 'none'}", overwrite=False, permute_sent_ratio=0.0, poisson_lambda=0.0, random_ratio=0.0, replace_length=-1, rotate_ratio=0.5, save_config=None, save_data='toy-ende/run/example', seed=-1, share_vocab=False, skip_empty_level='warning', src_seq_length=200, src_subword_model=None, src_subword_type='none', src_vocab=None, subword_alpha=0, subword_nbest=1, switchout_temperature=1.0, tgt_seq_length=200, tgt_subword_model=None, tgt_subword_type='none', tgt_vocab=None, tokendrop_temperature=1.0, tokenmask_temperature=1.0, transforms=[])
+
+
+
+```python
+from onmt.bin.build_vocab import build_vocab_main
+build_vocab_main(opts)
+```
+
+    [2020-09-25 15:28:08,068 INFO] Parsed 2 corpora from -data.
+    [2020-09-25 15:28:08,069 INFO] Counter vocab from 10000 samples.
+    [2020-09-25 15:28:08,070 INFO] Save 10000 transformed example/corpus.
+    [2020-09-25 15:28:08,070 INFO] corpus's transforms: TransformPipe()
+    [2020-09-25 15:28:08,101 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)...
+    [2020-09-25 15:28:08,320 INFO] Just finished the first loop
+    [2020-09-25 15:28:08,320 INFO] Counters src:24995
+    [2020-09-25 15:28:08,321 INFO] Counters tgt:35816
+
+
+```python
+ls toy-ende/run
+```
+
+    example.vocab.src  example.vocab.tgt  sample/
+
+
+We just created our source and target vocabularies, respectively `toy-ende/run/example.vocab.src` and `toy-ende/run/example.vocab.tgt`.
+
+### Build fields
+
+We can build the fields from the text files that were just created.
+
+
+```python
+src_vocab_path = "toy-ende/run/example.vocab.src"
+tgt_vocab_path = "toy-ende/run/example.vocab.tgt"
+```
+
+
+```python
+# initialize the frequency counter
+counters = defaultdict(Counter)
+# load source vocab
+_src_vocab, _src_vocab_size = _load_vocab(
+    src_vocab_path,
+    'src',
+    counters)
+# load target vocab
+_tgt_vocab, _tgt_vocab_size = _load_vocab(
+    tgt_vocab_path,
+    'tgt',
+    counters)
+```
+
+    [2020-09-25 15:28:08,495 INFO] Loading src vocabulary from toy-ende/run/example.vocab.src
+    [2020-09-25 15:28:08,554 INFO] Loaded src vocab has 24995 tokens.
+    [2020-09-25 15:28:08,562 INFO] Loading tgt vocabulary from toy-ende/run/example.vocab.tgt
+    [2020-09-25 15:28:08,617 INFO] Loaded tgt vocab has 35816 tokens.
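+
+`counters` should now hold the source and target token frequencies read from the two vocabulary files (the totals are the ones reported in the logs above). As a quick, purely illustrative sanity check, we could peek at the most frequent tokens on each side:
+
+
+```python
+# Illustrative check only: the counters populated by _load_vocab are plain
+# collections.Counter objects keyed by side ('src' / 'tgt').
+print(counters['src'].most_common(5))
+print(counters['tgt'].most_common(5))
+```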
+
+
+
+```python
+# initialize fields
+src_nfeats, tgt_nfeats = 0, 0  # do not support word features for now
+fields = get_fields(
+    'text', src_nfeats, tgt_nfeats)
+```
+
+
+```python
+fields
+```
+
+
+
+
+    {'src': ,
+     'tgt': ,
+     'indices': }
+
+
+
+```python
+# build fields vocab
+share_vocab = False
+vocab_size_multiple = 1
+src_vocab_size = 30000
+tgt_vocab_size = 30000
+src_words_min_frequency = 1
+tgt_words_min_frequency = 1
+vocab_fields = _build_fields_vocab(
+    fields, counters, 'text', share_vocab,
+    vocab_size_multiple,
+    src_vocab_size, src_words_min_frequency,
+    tgt_vocab_size, tgt_words_min_frequency)
+```
+
+    [2020-09-25 15:28:08,699 INFO]  * tgt vocab size: 30004.
+    [2020-09-25 15:28:08,749 INFO]  * src vocab size: 24997.
+
+
+An alternative way of creating these fields is to run `onmt_train` without actually training, to just output the necessary files.
+
+### Prepare for training: model and optimizer creation
+
+Let's get a few fields/vocab related variables to simplify the model creation a bit:
+
+
+```python
+src_text_field = vocab_fields["src"].base_field
+src_vocab = src_text_field.vocab
+src_padding = src_vocab.stoi[src_text_field.pad_token]
+
+tgt_text_field = vocab_fields['tgt'].base_field
+tgt_vocab = tgt_text_field.vocab
+tgt_padding = tgt_vocab.stoi[tgt_text_field.pad_token]
+```
+
+Next we specify the core model itself. Here we will build a small model with an encoder and an attention-based input-feeding decoder. Both will be RNNs, and the encoder will be bidirectional.
+
+
+```python
+emb_size = 100
+rnn_size = 500
+# Specify the core model.
+
+encoder_embeddings = onmt.modules.Embeddings(emb_size, len(src_vocab),
+                                             word_padding_idx=src_padding)
+
+encoder = onmt.encoders.RNNEncoder(hidden_size=rnn_size, num_layers=1,
+                                   rnn_type="LSTM", bidirectional=True,
+                                   embeddings=encoder_embeddings)
+
+decoder_embeddings = onmt.modules.Embeddings(emb_size, len(tgt_vocab),
+                                             word_padding_idx=tgt_padding)
+decoder = onmt.decoders.decoder.InputFeedRNNDecoder(
+    hidden_size=rnn_size, num_layers=1, bidirectional_encoder=True,
+    rnn_type="LSTM", embeddings=decoder_embeddings)
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+model = onmt.models.model.NMTModel(encoder, decoder)
+model.to(device)
+
+# Specify the tgt word generator and loss computation module
+model.generator = nn.Sequential(
+    nn.Linear(rnn_size, len(tgt_vocab)),
+    nn.LogSoftmax(dim=-1)).to(device)
+
+loss = onmt.utils.loss.NMTLossCompute(
+    criterion=nn.NLLLoss(ignore_index=tgt_padding, reduction="sum"),
+    generator=model.generator)
+```
+
+Now we set up the optimizer. This could be a core torch optim class, or our wrapper, which handles learning rate updates and gradient normalization automatically.
+
+
+```python
+lr = 1
+torch_optimizer = torch.optim.SGD(model.parameters(), lr=lr)
+optim = onmt.utils.optimizers.Optimizer(
+    torch_optimizer, learning_rate=lr, max_grad_norm=2)
+```
+
+### Create the training and validation data iterators
+
+Now we need to create the dynamic dataset iterator.
+
+This is not very 'library-friendly' for now because of the way the `DynamicDatasetIter` constructor is defined. It may evolve in the future.
+ + +```python +src_train = "toy-ende/src-train.txt" +tgt_train = "toy-ende/tgt-train.txt" +src_val = "toy-ende/src-val.txt" +tgt_val = "toy-ende/tgt-val.txt" + +# build the ParallelCorpus +corpus = ParallelCorpus("corpus", src_train, tgt_train) +valid = ParallelCorpus("valid", src_val, tgt_val) +``` + + +```python +# build the training iterator +train_iter = DynamicDatasetIter( + corpora={"corpus": corpus}, + corpora_info={"corpus": {"weight": 1}}, + transforms={}, + fields=vocab_fields, + is_train=True, + batch_type="tokens", + batch_size=4096, + batch_size_multiple=1, + data_type="text") +``` + + +```python +# make sure the iteration happens on GPU 0 (-1 for CPU, N for GPU N) +train_iter = iter(IterOnDevice(train_iter, 0)) +``` + + +```python +# build the validation iterator +valid_iter = DynamicDatasetIter( + corpora={"valid": valid}, + corpora_info={"valid": {"weight": 1}}, + transforms={}, + fields=vocab_fields, + is_train=False, + batch_type="sents", + batch_size=8, + batch_size_multiple=1, + data_type="text") +``` + + +```python +valid_iter = IterOnDevice(valid_iter, 0) +``` + +### Training + +Finally we train. + + +```python +report_manager = onmt.utils.ReportMgr( + report_every=50, start_time=None, tensorboard_writer=None) + +trainer = onmt.Trainer(model=model, + train_loss=loss, + valid_loss=loss, + optim=optim, + report_manager=report_manager, + dropout=[0.1]) + +trainer.train(train_iter=train_iter, + train_steps=1000, + valid_iter=valid_iter, + valid_steps=500) +``` + + [2020-09-25 15:28:15,184 INFO] Start training loop and validate every 500 steps... + [2020-09-25 15:28:15,185 INFO] corpus's transforms: TransformPipe() + [2020-09-25 15:28:15,187 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:28:21,140 INFO] Step 50/ 1000; acc: 7.52; ppl: 8832.29; xent: 9.09; lr: 1.00000; 18916/18871 tok/s; 6 sec + [2020-09-25 15:28:24,869 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:28:27,121 INFO] Step 100/ 1000; acc: 9.34; ppl: 1840.06; xent: 7.52; lr: 1.00000; 18911/18785 tok/s; 12 sec + [2020-09-25 15:28:33,048 INFO] Step 150/ 1000; acc: 10.35; ppl: 1419.18; xent: 7.26; lr: 1.00000; 19062/19017 tok/s; 18 sec + [2020-09-25 15:28:37,019 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:28:39,022 INFO] Step 200/ 1000; acc: 11.14; ppl: 1127.44; xent: 7.03; lr: 1.00000; 19084/18911 tok/s; 24 sec + [2020-09-25 15:28:45,073 INFO] Step 250/ 1000; acc: 12.46; ppl: 912.13; xent: 6.82; lr: 1.00000; 18575/18570 tok/s; 30 sec + [2020-09-25 15:28:49,301 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:28:51,151 INFO] Step 300/ 1000; acc: 13.04; ppl: 779.50; xent: 6.66; lr: 1.00000; 18394/18307 tok/s; 36 sec + [2020-09-25 15:28:57,316 INFO] Step 350/ 1000; acc: 14.04; ppl: 685.48; xent: 6.53; lr: 1.00000; 18339/18173 tok/s; 42 sec + [2020-09-25 15:29:02,117 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:29:03,576 INFO] Step 400/ 1000; acc: 14.99; ppl: 590.20; xent: 6.38; lr: 1.00000; 18090/18029 tok/s; 48 sec + [2020-09-25 15:29:09,546 INFO] Step 450/ 1000; acc: 16.00; ppl: 524.51; xent: 6.26; lr: 1.00000; 18726/18536 tok/s; 54 sec + [2020-09-25 15:29:14,585 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... 
+ [2020-09-25 15:29:15,596 INFO] Step 500/ 1000; acc: 16.78; ppl: 453.38; xent: 6.12; lr: 1.00000; 17877/17980 tok/s; 60 sec + [2020-09-25 15:29:15,597 INFO] valid's transforms: TransformPipe() + [2020-09-25 15:29:15,599 INFO] Loading ParallelCorpus(toy-ende/src-val.txt, toy-ende/tgt-val.txt, align=None)... + [2020-09-25 15:29:24,528 INFO] Validation perplexity: 295.1 + [2020-09-25 15:29:24,529 INFO] Validation accuracy: 17.6533 + [2020-09-25 15:29:30,592 INFO] Step 550/ 1000; acc: 17.47; ppl: 421.26; xent: 6.04; lr: 1.00000; 7726/7610 tok/s; 75 sec + [2020-09-25 15:29:36,055 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:29:36,695 INFO] Step 600/ 1000; acc: 18.95; ppl: 354.44; xent: 5.87; lr: 1.00000; 17470/17598 tok/s; 82 sec + [2020-09-25 15:29:42,794 INFO] Step 650/ 1000; acc: 19.60; ppl: 328.47; xent: 5.79; lr: 1.00000; 18994/18793 tok/s; 88 sec + [2020-09-25 15:29:48,635 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:29:48,924 INFO] Step 700/ 1000; acc: 20.57; ppl: 285.48; xent: 5.65; lr: 1.00000; 17856/17788 tok/s; 94 sec + [2020-09-25 15:29:54,898 INFO] Step 750/ 1000; acc: 21.97; ppl: 249.06; xent: 5.52; lr: 1.00000; 19030/18924 tok/s; 100 sec + [2020-09-25 15:30:01,233 INFO] Step 800/ 1000; acc: 22.66; ppl: 228.54; xent: 5.43; lr: 1.00000; 17571/17471 tok/s; 106 sec + [2020-09-25 15:30:01,357 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:30:07,345 INFO] Step 850/ 1000; acc: 24.32; ppl: 193.65; xent: 5.27; lr: 1.00000; 18344/18313 tok/s; 112 sec + [2020-09-25 15:30:11,363 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:30:13,487 INFO] Step 900/ 1000; acc: 24.93; ppl: 177.65; xent: 5.18; lr: 1.00000; 18293/18105 tok/s; 118 sec + [2020-09-25 15:30:19,670 INFO] Step 950/ 1000; acc: 26.33; ppl: 157.10; xent: 5.06; lr: 1.00000; 17791/17746 tok/s; 124 sec + [2020-09-25 15:30:24,072 INFO] Loading ParallelCorpus(toy-ende/src-train.txt, toy-ende/tgt-train.txt, align=None)... + [2020-09-25 15:30:25,820 INFO] Step 1000/ 1000; acc: 27.47; ppl: 137.64; xent: 4.92; lr: 1.00000; 17942/17962 tok/s; 131 sec + [2020-09-25 15:30:25,822 INFO] Loading ParallelCorpus(toy-ende/src-val.txt, toy-ende/tgt-val.txt, align=None)... + [2020-09-25 15:30:34,665 INFO] Validation perplexity: 241.801 + [2020-09-25 15:30:34,666 INFO] Validation accuracy: 20.2837 + + + + + + + + + +### Translate + +For translation, we can build a "traditional" (as opposed to dynamic) dataset for now. 
+ + +```python +src_data = {"reader": onmt.inputters.str2reader["text"](), "data": src_val} +tgt_data = {"reader": onmt.inputters.str2reader["text"](), "data": tgt_val} +_readers, _data = onmt.inputters.Dataset.config( + [('src', src_data), ('tgt', tgt_data)]) +``` + + +```python +dataset = onmt.inputters.Dataset( + vocab_fields, readers=_readers, data=_data, + sort_key=onmt.inputters.str2sortkey["text"]) +``` + + +```python +data_iter = onmt.inputters.OrderedIterator( + dataset=dataset, + device="cuda", + batch_size=10, + train=False, + sort=False, + sort_within_batch=True, + shuffle=False + ) +``` + + +```python +src_reader = onmt.inputters.str2reader["text"] +tgt_reader = onmt.inputters.str2reader["text"] +scorer = GNMTGlobalScorer(alpha=0.7, + beta=0., + length_penalty="avg", + coverage_penalty="none") +gpu = 0 if torch.cuda.is_available() else -1 +translator = Translator(model=model, + fields=vocab_fields, + src_reader=src_reader, + tgt_reader=tgt_reader, + global_scorer=scorer, + gpu=gpu) +builder = onmt.translate.TranslationBuilder(data=dataset, + fields=vocab_fields) +``` + +**Note**: translations will be very poor, because of the very low quantity of data, the absence of proper tokenization, and the brevity of the training. + + +```python +for batch in data_iter: + trans_batch = translator.translate_batch( + batch=batch, src_vocabs=[src_vocab], + attn_debug=False) + translations = builder.from_batch(trans_batch) + for trans in translations: + print(trans.log(0)) + break +``` + + + SENT 0: ['Parliament', 'Does', 'Not', 'Support', 'Amendment', 'Freeing', 'Tymoshenko'] + PRED 0: Parlament das Parlament über die Europäische Parlament , die sich in der Lage in der Lage ist , die es in der Lage sind . + PRED SCORE: -1.5935 + + + SENT 0: ['Today', ',', 'the', 'Ukraine', 'parliament', 'dismissed', ',', 'within', 'the', 'Code', 'of', 'Criminal', 'Procedure', 'amendment', ',', 'the', 'motion', 'to', 'revoke', 'an', 'article', 'based', 'on', 'which', 'the', 'opposition', 'leader', ',', 'Yulia', 'Tymoshenko', ',', 'was', 'sentenced', '.'] + PRED 0: In der Nähe des Hotels , die in der Lage , die sich in der Lage ist , in der Lage , die in der Lage , die in der Lage ist . + PRED SCORE: -1.7173 + + + SENT 0: ['The', 'amendment', 'that', 'would', 'lead', 'to', 'freeing', 'the', 'imprisoned', 'former', 'Prime', 'Minister', 'was', 'revoked', 'during', 'second', 'reading', 'of', 'the', 'proposal', 'for', 'mitigation', 'of', 'sentences', 'for', 'economic', 'offences', '.'] + PRED 0: Die Tatsache , die sich in der Lage in der Lage ist , die für eine Antwort der Entwicklung für die Entwicklung von Präsident . + PRED SCORE: -1.6834 + + + SENT 0: ['In', 'October', ',', 'Tymoshenko', 'was', 'sentenced', 'to', 'seven', 'years', 'in', 'prison', 'for', 'entering', 'into', 'what', 'was', 'reported', 'to', 'be', 'a', 'disadvantageous', 'gas', 'deal', 'with', 'Russia', '.'] + PRED 0: In der Nähe wurde die Menschen in der Lage ist , die sich in der Lage . + PRED SCORE: -1.5765 + + + SENT 0: ['The', 'verdict', 'is', 'not', 'yet', 'final;', 'the', 'court', 'will', 'hear', 'Tymoshenko', ''s', 'appeal', 'in', 'December', '.'] + PRED 0: Es ist nicht der Fall , die in der Lage in der Lage sind . + PRED SCORE: -1.3287 + + + SENT 0: ['Tymoshenko', 'claims', 'the', 'verdict', 'is', 'a', 'political', 'revenge', 'of', 'the', 'regime;', 'in', 'the', 'West', ',', 'the', 'trial', 'has', 'also', 'evoked', 'suspicion', 'of', 'being', 'biased', '.'] + PRED 0: Um in der Lage ist auch eine Lösung Rolle . 
+    PRED SCORE: -1.3975
+    
+    
+    SENT 0: ['The', 'proposal', 'to', 'remove', 'Article', '365', 'from', 'the', 'Code', 'of', 'Criminal', 'Procedure', ',', 'upon', 'which', 'the', 'former', 'Prime', 'Minister', 'was', 'sentenced', ',', 'was', 'supported', 'by', '147', 'members', 'of', 'parliament', '.']
+    PRED 0: Der Vorschlag , die in der Lage , die in der Lage , die in der Lage ist , war er von der Fall wurde .
+    PRED SCORE: -1.6062
+    
+    
+    SENT 0: ['Its', 'ratification', 'would', 'require', '226', 'votes', '.']
+    PRED 0: Es wäre noch einmal noch einmal .
+    PRED SCORE: -1.8001
+    
+    
+    SENT 0: ['Libya', ''s', 'Victory']
+    PRED 0: In der Nähe des Hotels befindet sich in der Nähe des Hotels in der Lage .
+    PRED SCORE: -1.7097
+    
+    
+    SENT 0: ['The', 'story', 'of', 'Libya', ''s', 'liberation', ',', 'or', 'rebellion', ',', 'already', 'has', 'its', 'defeated', '.']
+    PRED 0: In der Nähe des Hotels in der Lage ist in der Lage .
+    PRED SCORE: -1.7885
diff --git a/docs/source/Summarization.md b/docs/source/examples/Summarization.md
similarity index 66%
rename from docs/source/Summarization.md
rename to docs/source/examples/Summarization.md
index 6781d73142..61cf0ff450 100644
--- a/docs/source/Summarization.md
+++ b/docs/source/examples/Summarization.md
@@ -1,6 +1,6 @@
 # Summarization
 
-Note: The process and results below are presented in our paper `Bottom-Up Abstractive Summarization`. Please consider citing it if you follow these instructions.
+Note: The process and results below are presented in the paper `Bottom-Up Abstractive Summarization`. Please consider citing it if you follow these instructions.
 
 ```
 @inproceedings{gehrmann2018bottom,
@@ -25,49 +25,50 @@ An example article-title pair from Gigaword should look like this:
 
 *australian current account deficit narrows sharply*
 
-### Preprocessing the data
+### Preparing the data and vocab
 
-Since we are using copy-attention [1] in the model, we need to preprocess the dataset such that source and target are aligned and use the same dictionary. This is achieved by using the options `dynamic_dict` and `share_vocab`.
-We additionally turn off truncation of the source to ensure that inputs longer than 50 words are not truncated.
 For CNN-DM we follow See et al. [2] and additionally truncate the source length at 400 tokens and the target at 100. We also note that in CNN-DM, we found models to work better if the target surrounds sentences with tags such that a sentence looks like `<t> w1 w2 w3 . </t>`. If you use this formatting, you can remove the tags after the inference step with the commands `sed -i 's/ <\/t>//g' FILE.txt` and `sed -i 's/<t> //g' FILE.txt`.
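+
+For illustration, a deliberately naive sketch of producing such tagged targets could look like the following; the file names and the period-based sentence splitting are assumptions here, and a real pipeline would rely on a proper sentence tokenizer:
+
+```python
+# Naive sketch: wrap each (already tokenized) sentence of a summary in <t> ... </t>.
+def tag_sentences(line):
+    sentences = [s.strip(" .") for s in line.strip().split(" . ") if s.strip(" .")]
+    return " ".join("<t> {} . </t>".format(s) for s in sentences)
+
+# Assumed input/output paths, matching the layout used in this example.
+with open("cnndm/train.txt.tgt") as fin, open("cnndm/train.txt.tgt.tagged", "w") as fout:
+    for line in fin:
+        fout.write(tag_sentences(line) + "\n")
+```
+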
-**Command used**:
+**YAML Configuration**:
 
-(1) CNN-DM
+```yaml
+# cnndm.yaml
 
-```bash
-onmt_preprocess -train_src data/cnndm/train.txt.src \
-                -train_tgt data/cnndm/train.txt.tgt.tagged \
-                -valid_src data/cnndm/val.txt.src \
-                -valid_tgt data/cnndm/val.txt.tgt.tagged \
-                -save_data data/cnndm/CNNDM \
-                -src_seq_length 10000 \
-                -tgt_seq_length 10000 \
-                -src_seq_length_trunc 400 \
-                -tgt_seq_length_trunc 100 \
-                -dynamic_dict \
-                -share_vocab \
-                -shard_size 100000
-```
+## Where the vocab(s) will be written
+save_data: cnndm/run/example
+# Prevent overwriting existing files in the folder
+overwrite: False
 
-(2) Gigaword
+# truncate examples
+src_seq_length_trunc: 400
+tgt_seq_length_trunc: 100
+
+# common vocabulary for source and target
+share_vocab: True
+
+# Corpus opts:
+data:
+    cnndm:
+        path_src: cnndm/train.txt.src
+        path_tgt: cnndm/train.txt.tgt.tagged
+    valid:
+        path_src: cnndm/val.txt.src
+        path_tgt: cnndm/val.txt.tgt.tagged
+...
+```
+
+Let's compute the vocab over the full dataset (`-n_sample -1`):
 
 ```bash
-onmt_preprocess -train_src data/giga/train.article.txt \
-                -train_tgt data/giga/train.title.txt \
-                -valid_src data/giga/valid.article.txt \
-                -valid_tgt data/giga/valid.title.txt \
-                -save_data data/giga/GIGA \
-                -src_seq_length 10000 \
-                -dynamic_dict \
-                -share_vocab \
-                -shard_size 100000
+onmt_build_vocab -config cnndm.yaml -n_sample -1
 ```
 
+This command will have written source and target vocabulary to `cnndm/run/example.vocab.src` and `cnndm/run/example.vocab.tgt`. These two files should be the same, as `share_vocab` is set.
 
 ### Training
 
-The training procedure described in this section for the most part follows parameter choices and implementation similar to that of See et al. [2]. We describe notable options in the following list:
+The training procedure described in this section for the most part follows parameter choices and implementation similar to that of See et al. [2].
+
+Most significant options are:
 
 - `copy_attn`: This is the most important option, since it allows the model to copy words from the source.
 - `global_attention mlp`: This makes the model use the attention mechanism introduced by Bahdanau et al. [3] instead of that by Luong et al. [4] (`global_attention dot`).
@@ -78,85 +79,120 @@ The training procedure described in this section for the most part follows param
 - `optim adagrad`: Adagrad outperforms SGD when coupled with the following option.
 - `adagrad_accumulator_init 0.1`: PyTorch does not initialize the accumulator in adagrad with any values. To match the optimization algorithm with the Tensorflow version, this option needs to be added.
 
+Note: Since we are using copy-attention [1] in the model, additional fields will be computed so that source and target are aligned and use the same dictionary. Previously achieved with the `-dynamic_dict` preprocessing flag in the legacy version, this is now automatically handled when `-copy_attn` is enabled.
 
 We are using a 128-dimensional word embedding and a 512-dimensional one-layer LSTM. On the encoder side, we use a bidirectional LSTM (`brnn`), which means that the 512 dimensions are split into 256 dimensions per direction. We additionally set the maximum norm of the gradient to 2 (renormalizing if the gradient norm exceeds this value), and do not use any dropout.
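+
+Assuming the data and vocabulary options from `cnndm.yaml` above are kept in the same file, each of the configurations below can simply be appended to it, and training is then launched with:
+
+```bash
+onmt_train -config cnndm.yaml
+```
+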
-**commands used**: +**Configurations**: (1) CNN-DM -```bash -onmt_train -save_model models/cnndm \ - -data data/cnndm/CNNDM \ - -copy_attn \ - -global_attention mlp \ - -word_vec_size 128 \ - -rnn_size 512 \ - -layers 1 \ - -encoder_type brnn \ - -train_steps 200000 \ - -max_grad_norm 2 \ - -dropout 0. \ - -batch_size 16 \ - -valid_batch_size 16 \ - -optim adagrad \ - -learning_rate 0.15 \ - -adagrad_accumulator_init 0.1 \ - -reuse_copy_attn \ - -copy_loss_by_seqlength \ - -bridge \ - -seed 777 \ - -world_size 2 \ - -gpu_ranks 0 1 +The basic RNN configuration is defined by these parameters: + +```yaml +# maximum vocab size +src_vocab_size: 50000 +tgt_vocab_size: 50000 + +src_vocab: cnndm/run/example.vocab.src +tgt_vocab: cnndm/run/example.vocab.tgt + +save_model: cnndm/run/model +copy_attn: true +global_attention: mlp +word_vec_size: 128 +rnn_size: 512 +layers: 1 +encoder_type: brnn +train_steps: 200000 +max_grad_norm: 2 +dropout: 0 +batch_size: 16 +valid_batch_size: 16 +optim: adagrad +learning_rate: 0.15 +adagrad_accumulator_init: 0.1 +reuse_copy_attn: true +copy_loss_by_seqlength: true +bridge: true +seed: 777 +world_size: 2 +gpu_ranks: [0, 1] ``` (2) CNN-DM Transformer -The following script trains the transformer model on CNN-DM - -```bash -onmt_train -data data/cnndm/CNNDM \ - -save_model models/cnndm \ - -layers 4 \ - -rnn_size 512 \ - -word_vec_size 512 \ - -max_grad_norm 0 \ - -optim adam \ - -encoder_type transformer \ - -decoder_type transformer \ - -position_encoding \ - -dropout 0\.2 \ - -param_init 0 \ - -warmup_steps 8000 \ - -learning_rate 2 \ - -decay_method noam \ - -label_smoothing 0.1 \ - -adam_beta2 0.998 \ - -batch_size 4096 \ - -batch_type tokens \ - -normalization tokens \ - -max_generator_batches 2 \ - -train_steps 200000 \ - -accum_count 4 \ - -share_embeddings \ - -copy_attn \ - -param_init_glorot \ - -world_size 2 \ - -gpu_ranks 0 1 +Transformer configuration is the following: + +```yaml +src_vocab_size: 50000 +tgt_vocab_size: 50000 + +src_vocab: cnndm/run/example.vocab.src +tgt_vocab: cnndm/run/example.vocab.tgt + +save_model: cnndm/run/model_transformer +layers: 4 +rnn_size: 512 +word_vec_size: 512 +max_grad_norm: 0 +optim: adam +encoder_type: transformer +decoder_type: transformer +position_encoding: true +dropout: 0.2 +attention_dropout: 0.2 +param_init: 0 +warmup_steps: 8000 +learning_rate: 2 +decay_method: noam +label_smoothing: 0.1 +adam_beta2: 0.998 +batch_size: 4096 +batch_type: tokens +normalization: tokens +train_steps: 200000 +accum_count: 4 +share_embeddings: true +copy_attn: true +param_init_glorot: true +world_size: 2 +gpu_ranks: [0, 1] ``` (3) Gigaword -Gigaword can be trained equivalently. As a baseline, we show a model trained with the following command: - -``` -onmt_train -data data/giga/GIGA \ - -save_model models/giga \ - -copy_attn \ - -reuse_copy_attn \ - -train_steps 200000 +Gigaword can be trained equivalently. You just need to adapt the `data` part of the YAML configuration. 
+
+```yaml
+# gigaword.yaml
+
+## Where the vocab(s) will be written
+save_data: gigaword/run/example
+# Prevent overwriting existing files in the folder
+overwrite: False
+
+# prevent filtering of long examples
+src_seq_length: 10000
+tgt_seq_length: 10000
+
+# common vocabulary for source and target
+share_vocab: True
+
+# Corpus opts:
+data:
+    gigaword:
+        path_src: gigaword/train.article.txt
+        path_tgt: gigaword/train.title.txt
+        transforms: [filtertoolong]
+        weight: 1
+    valid:
+        path_src: gigaword/valid.article.txt
+        path_tgt: gigaword/valid.title.txt
+        transforms: [filtertoolong]
+...
 ```
@@ -172,7 +208,7 @@ During inference, we use beam-search with a beam-size of 10. We also added speci
 - `block_ngram_repeat 3`: Prevent the model from repeating trigrams.
 - `ignore_when_blocking "." "</t>" "<t>"`: Allow the model to repeat trigrams with the sentence boundary tokens.
 
-**commands used**:
+**Commands used**:
 
 (1) CNN-DM
 
@@ -180,8 +216,8 @@ During inference, we use beam-search with a beam-size of 10. We also added speci
 onmt_translate -gpu X \
                -batch_size 20 \
                -beam_size 10 \
-               -model models/cnndm... \
-               -src data/cnndm/test.txt.src \
+               -model cnndm/run/... \
+               -src cnndm/test.txt.src \
                -output testout/cnndm.out \
                -min_length 35 \
                -verbose \
@@ -196,12 +232,11 @@ onmt_translate -gpu X \
 ```
 
-
 ### Evaluation
 
 #### CNN-DM
 
-To evaluate the ROUGE scores on CNN-DM, we extended the pyrouge wrapper with additional evaluations such as the amount of repeated n-grams (typically found in models with copy attention), found [here](https://github.com/sebastianGehrmann/rouge-baselines). The repository includes a sub-repo called pyrouge. Make sure to clone the code with the `git clone --recurse-submodules https://github.com/sebastianGehrmann/rouge-baselines` command to check this out as well and follow the installation instructions on the pyrouge repository before calling this script.
+To evaluate the ROUGE scores on CNN-DM, we extended the `pyrouge` wrapper with additional evaluations such as the amount of repeated n-grams (typically found in models with copy attention), found [here](https://github.com/sebastianGehrmann/rouge-baselines). The repository includes a sub-repo called pyrouge. Make sure to clone the code with the `git clone --recurse-submodules https://github.com/sebastianGehrmann/rouge-baselines` command to check this out as well and follow the installation instructions on the pyrouge repository before calling this script.
 
 The installation instructions can be found [here](https://github.com/falcondai/pyrouge/tree/9cdbfbda8b8d96e7c2646ffd048743ddcf417ed9#installation). Note that on MacOS, we found that the pointer to your perl installation in line 1 of `pyrouge/RELEASE-1.5.5/ROUGE-1.5.5.pl` might be different from the one you have installed. A simple fix is to change this line to `#!/usr/local/bin/perl -w` if it fails.
 
 It can be run with the following command:
@@ -216,7 +251,7 @@ The `sent_tag_verbatim` option strips `<t>` and `</t>` tags around sentences - w
 
 For evaluation of large test sets such as Gigaword, we use a parallel Python wrapper around ROUGE, found [here](https://github.com/pltrdy/files2rouge).
-**command used**:
+**Command used**:
 
 `files2rouge giga.out test.title.txt --verbose`
 
 ### Scores and Models
diff --git a/docs/source/examples/Translation.md b/docs/source/examples/Translation.md
new file mode 100644
index 0000000000..0cf26baa56
--- /dev/null
+++ b/docs/source/examples/Translation.md
@@ -0,0 +1,181 @@
+
+# Translation
+
+This example is for training a model for the [WMT'14 English to German news translation task](https://www.statmt.org/wmt14/translation-task.html). It will use on-the-fly tokenization with [sentencepiece](https://github.com/google/sentencepiece) and [sacrebleu](https://github.com/mjpost/sacrebleu) for evaluation.
+
+
+## Step 0: Download the data and prepare the subword model
+
+Preliminary steps are defined in the [`examples/scripts/prepare_wmt_data.sh`](https://github.com/OpenNMT/OpenNMT-py/tree/master/examples/scripts/prepare_wmt_data.sh) script. The following command will download the necessary datasets and prepare a sentencepiece model:
+```bash
+chmod u+x prepare_wmt_data.sh
+./prepare_wmt_data.sh
+```
+
+Note: you should have installed the [sentencepiece](https://github.com/google/sentencepiece) binaries before running this script.
+
+## Step 1: Build the vocabulary
+
+We need to set up the desired configuration with (1) the data and (2) the tokenization options:
+
+```yaml
+# wmt14_en_de.yaml
+save_data: data/wmt/run/example
+
+# Corpus opts:
+data:
+    commoncrawl:
+        path_src: data/wmt/commoncrawl.de-en.en
+        path_tgt: data/wmt/commoncrawl.de-en.de
+        transforms: [sentencepiece, filtertoolong]
+        weight: 23
+    europarl:
+        path_src: data/wmt/europarl-v7.de-en.en
+        path_tgt: data/wmt/europarl-v7.de-en.de
+        transforms: [sentencepiece, filtertoolong]
+        weight: 19
+    news_commentary:
+        path_src: data/wmt/news-commentary-v11.de-en.en
+        path_tgt: data/wmt/news-commentary-v11.de-en.de
+        transforms: [sentencepiece, filtertoolong]
+        weight: 3
+    valid:
+        path_src: data/wmt/valid.en
+        path_tgt: data/wmt/valid.de
+        transforms: [sentencepiece]
+
+### Transform related opts:
+#### Subword
+src_subword_model: data/wmt/wmtende.model
+tgt_subword_model: data/wmt/wmtende.model
+src_subword_nbest: 1
+src_subword_alpha: 0.0
+tgt_subword_nbest: 1
+tgt_subword_alpha: 0.0
+#### Filter
+src_seq_length: 150
+tgt_seq_length: 150
+
+# silently ignore empty lines in the data
+skip_empty_level: silent
+
+```
+
+Then we can execute the vocabulary building script. Let's set `-n_sample` to `-1` to compute the vocabulary over the whole corpora:
+
+```bash
+onmt_build_vocab -config wmt14_en_de.yaml -n_sample -1
+```
+
+## Step 2: Train the model
+
+We need to add the following parameters to the YAML configuration:
+
+```yaml
+...
+ +# General opts +save_model: data/wmt/run/model +keep_checkpoint: 50 +save_checkpoint_steps: 5000 +average_decay: 0.0005 +seed: 1234 +report_every: 100 +train_steps: 100000 +valid_steps: 5000 + +# Batching +queue_size: 10000 +bucket_size: 32768 +world_size: 2 +gpu_ranks: [0, 1] +batch_type: "tokens" +batch_size: 4096 +valid_batch_size: 16 +batch_size_multiple: 1 +max_generator_batches: 0 +accum_count: [3] +accum_steps: [0] + +# Optimization +model_dtype: "fp32" +optim: "adam" +learning_rate: 2 +warmup_steps: 8000 +decay_method: "noam" +adam_beta2: 0.998 +max_grad_norm: 0 +label_smoothing: 0.1 +param_init: 0 +param_init_glorot: true +normalization: "tokens" + +# Model +encoder_type: transformer +decoder_type: transformer +enc_layers: 6 +dec_layers: 6 +heads: 8 +rnn_size: 512 +word_vec_size: 512 +transformer_ff: 2048 +dropout_steps: [0] +dropout: [0.1] +attention_dropout: [0.1] +share_decoder_embeddings: true +share_embeddings: true +``` + +## Step 3: Translate and evaluate + +We need to tokenize the testset with the same sentencepiece model as used in training: + +```bash +spm_encode --model=data/wmt/wmtende.model \ + < data/wmt/test.en \ + > data/wmt/test.en.sp +spm_encode --model=data/wmt/wmtende.model \ + < data/wmt/test.de \ + > data/wmt/test.de.sp +``` + +We can translate the testset with the following command: + +```bash +for checkpoint in data/wmt/run/model_step*.pt; do + echo "# Translating with checkpoint $checkpoint" + base=$(basename $checkpoint) + onmt_translate \ + -gpu 0 \ + -batch_size 16384 -batch_type tokens \ + -beam_size 5 \ + -model $checkpoint \ + -src data/wmt/test.en.sp \ + -tgt data/wmt/test.de.sp \ + -output data/wmt/test.de.hyp_${base%.*}.sp +done +``` + +Prior to evaluation, we need to detokenize the hypothesis: + +```bash +for checkpoint in data/wmt/run/model_step*.pt; do + base=$(basename $checkpoint) + spm_decode \ + -model=data/wmt/wmtende.model \ + -input_format=piece \ + < data/wmt/test.de.hyp_${base%.*}.sp \ + > data/wmt/test.de.hyp_${base%.*} +done +``` + + +Finally, we can compute detokenized BLEU with `sacrebleu`: + +```bash +for checkpoint in data/wmt/run/model_step*.pt; do + echo "$checkpoint" + base=$(basename $checkpoint) + sacrebleu data/wmt/test.de < data/wmt/test.de.hyp_${base%.*} +done +``` diff --git a/docs/source/extended.md b/docs/source/extended.md deleted file mode 100644 index 1f27927265..0000000000 --- a/docs/source/extended.md +++ /dev/null @@ -1,39 +0,0 @@ - -# Translation - -The example below uses the Moses tokenizer (http://www.statmt.org/moses/) to prepare the data and the moses BLEU script for evaluation. This example if for training for the WMT'16 Multimodal Translation task (http://www.statmt.org/wmt16/multimodal-task.html). - -Step 0. Download the data. - -```bash -mkdir -p data/multi30k -wget http://www.quest.dcs.shef.ac.uk/wmt16_files_mmt/training.tar.gz && tar -xf training.tar.gz -C data/multi30k && rm training.tar.gz -wget http://www.quest.dcs.shef.ac.uk/wmt16_files_mmt/validation.tar.gz && tar -xf validation.tar.gz -C data/multi30k && rm validation.tar.gz -wget http://www.quest.dcs.shef.ac.uk/wmt17_files_mmt/mmt_task1_test2016.tar.gz && tar -xf mmt_task1_test2016.tar.gz -C data/multi30k && rm mmt_task1_test2016.tar.gz -``` - -Step 1. Preprocess the data. 
- -```bash -for l in en de; do for f in data/multi30k/*.$l; do if [[ "$f" != *"test"* ]]; then sed -i "$ d" $f; fi; done; done -for l in en de; do for f in data/multi30k/*.$l; do perl tools/tokenizer.perl -a -no-escape -l $l -q < $f > $f.atok; done; done -onmt_preprocess -train_src data/multi30k/train.en.atok -train_tgt data/multi30k/train.de.atok -valid_src data/multi30k/val.en.atok -valid_tgt data/multi30k/val.de.atok -save_data data/multi30k.atok.low -lower -``` - -Step 2. Train the model. - -```bash -onmt_train -data data/multi30k.atok.low -save_model multi30k_model -gpu_ranks 0 -``` - -Step 3. Translate sentences. - -```bash -onmt_translate -gpu 0 -model multi30k_model_*_e13.pt -src data/multi30k/test2016.en.atok -tgt data/multi30k/test2016.de.atok -replace_unk -verbose -output multi30k.test.pred.atok -``` - -And evaluate - -```bash -perl tools/multi-bleu.perl data/multi30k/test2016.de.atok < multi30k.test.pred.atok -``` diff --git a/docs/source/index.rst b/docs/source/index.rst index 260d6c0fed..67bb9d6a7a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -7,29 +7,31 @@ Contents main.md quickstart.md - FAQ.md CONTRIBUTING.md ref.rst +.. toctree:: + :caption: FAQ + :maxdepth: 2 + + FAQ.md + .. toctree:: :caption: Examples :maxdepth: 2 - Library.md - extended.md - Summarization.md - im2text.md - speech2text.md - vid2text.rst - ggnn.md + examples/Library.md + examples/Translation.md + examples/Summarization.md + examples/GGNN.md .. toctree:: :caption: Scripts :maxdepth: 2 - options/preprocess.rst + options/build_vocab.rst options/train.rst options/translate.rst options/server.rst @@ -43,4 +45,14 @@ Contents onmt.modules.rst onmt.translation.rst onmt.translate.translation_server.rst - onmt.inputters.rst \ No newline at end of file + onmt.inputters.rst + + +.. toctree:: + :caption: Legacy + :maxdepth: 2 + + legacy/FAQ.md + legacy/im2text.md + legacy/speech2text.md + legacy/vid2text.rst diff --git a/docs/source/legacy/FAQ.md b/docs/source/legacy/FAQ.md new file mode 100644 index 0000000000..73f06a1c1d --- /dev/null +++ b/docs/source/legacy/FAQ.md @@ -0,0 +1,186 @@ +# FAQ (Legacy version) + +This is the FAQ for the legacy version of OpenNMT-py (prior to the OpenNMT-py v2.0 release). + +## How do I use pretrained embeddings (e.g. GloVe)? + +Use `embeddings_to_torch.py` to generate encoder and decoder embeddings initialized with GloVe's values from the vocabularies produced by OpenNMT-py preprocessing. + +The script is a slightly modified version of ylhsieh's one. + +Usage: + +```shell +embeddings_to_torch.py [-h] [-emb_file_both EMB_FILE_BOTH] + [-emb_file_enc EMB_FILE_ENC] + [-emb_file_dec EMB_FILE_DEC] -output_file + OUTPUT_FILE -dict_file DICT_FILE [-verbose] + [-skip_lines SKIP_LINES] + [-type {GloVe,word2vec}] +``` + +Run `embeddings_to_torch.py -h` for complete usage info. + +### Example + +1. Get GloVe files: + + ```shell + mkdir "glove_dir" + wget http://nlp.stanford.edu/data/glove.6B.zip + unzip glove.6B.zip -d "glove_dir" + ``` + +2. Prepare data: + + ```shell + onmt_preprocess \ + -train_src data/train.src.txt \ + -train_tgt data/train.tgt.txt \ + -valid_src data/valid.src.txt \ + -valid_tgt data/valid.tgt.txt \ + -save_data data/data + ``` + +3. Prepare embeddings: + + ```shell + ./tools/embeddings_to_torch.py -emb_file_both "glove_dir/glove.6B.100d.txt" \ + -dict_file "data/data.vocab.pt" \ + -output_file "data/embeddings" + ``` + +4.
Train using pre-trained embeddings: + + ```shell + onmt_train -save_model data/model \ + -batch_size 64 \ + -layers 2 \ + -rnn_size 200 \ + -word_vec_size 100 \ + -pre_word_vecs_enc "data/embeddings.enc.pt" \ + -pre_word_vecs_dec "data/embeddings.dec.pt" \ + -data data/data + ``` + +## How do I use the Transformer model? + +The Transformer model is very sensitive to hyperparameters. To run it +effectively you need to set a bunch of different options that mimic the Google +setup. We have confirmed the following command can replicate their WMT results. + +```shell +python train.py -data /tmp/de2/data -save_model /tmp/extra \ + -layers 6 -rnn_size 512 -word_vec_size 512 -transformer_ff 2048 -heads 8 \ + -encoder_type transformer -decoder_type transformer -position_encoding \ + -train_steps 200000 -max_generator_batches 2 -dropout 0.1 \ + -batch_size 4096 -batch_type tokens -normalization tokens -accum_count 2 \ + -optim adam -adam_beta2 0.998 -decay_method noam -warmup_steps 8000 -learning_rate 2 \ + -max_grad_norm 0 -param_init 0 -param_init_glorot \ + -label_smoothing 0.1 -valid_steps 10000 -save_checkpoint_steps 10000 \ + -world_size 4 -gpu_ranks 0 1 2 3 +``` + +Here is what each of the parameters means: + +* `param_init_glorot` `-param_init 0`: correct initialization of parameters +* `position_encoding`: add sinusoidal position encoding to each embedding +* `optim adam`, `decay_method noam`, `warmup_steps 8000`: use the Noam learning rate schedule. +* `batch_type tokens`, `normalization tokens`, `accum_count 2`: batch and normalize based on number of tokens and not sentences. Compute gradients based on two batches. +* `label_smoothing 0.1`: use label smoothing loss. + +## Do you support multi-gpu? + +First you need to make sure you `export CUDA_VISIBLE_DEVICES=0,1,2,3`. + +If you want to use GPU ids 1 and 3 of your OS, you will need to `export CUDA_VISIBLE_DEVICES=1,3`. + +Both `-world_size` and `-gpu_ranks` need to be set. E.g. `-world_size 4 -gpu_ranks 0 1 2 3` will use 4 GPUs on this node only. + +If you want to use 2 nodes with 2 GPUs each, you need to set `-master_ip` and `-master_port`, and + +* `-world_size 4 -gpu_ranks 0 1`: on the first node +* `-world_size 4 -gpu_ranks 2 3`: on the second node +* `-accum_count 2`: This will accumulate over 2 batches before updating parameters. + +If you use a regular network card (1 Gbps), we suggest using a higher `-accum_count` to minimize the inter-node communication. + +**Note:** + +When training on several GPUs, you can't have them in 'Exclusive' compute mode (`nvidia-smi -c 3`). + +The multi-GPU setup relies on a Producer/Consumer pattern: `n_gpus + 1` processes are spawned, one training process (Consumer) per GPU, each consuming from a `Queue` of batches that will be processed next, plus one additional process (the Producer) creating batches and sending them to the Consumers. This setup is beneficial for both wall time and memory, since it loads data shards 'in advance', and does not require loading them for each GPU process. + +## How can I ensemble models at inference? + +You can specify several models on the translate.py command line: `-model model1_seed1 model2_seed2`. +Bear in mind that your models must share the same target vocabulary. + +## How can I weight different corpora at training? + +### Preprocessing + +We introduced `-train_ids` which is a list of IDs that will be given to the preprocessed shards. + +E.g.
if we have two corpora: `parallel.en` and `parallel.de` + `from_backtranslation.en` and `from_backtranslation.de`, we can pass the following in the `preprocess.py` command: + +```shell +... +-train_src parallel.en from_backtranslation.en \ +-train_tgt parallel.de from_backtranslation.de \ +-train_ids A B \ +-save_data my_data \ +... +``` + +and it will dump `my_data.train_A.X.pt` based on `parallel.en`//`parallel.de` and `my_data.train_B.X.pt` based on `from_backtranslation.en`//`from_backtranslation.de`. + +### Training + +We introduced `-data_ids` based on the same principle as above, as well as `-data_weights`, which is the list of weights each corpus should have. +E.g. + +```shell +... +-data my_data \ +-data_ids A B \ +-data_weights 1 7 \ +... +``` + +will mean that we'll look for `my_data.train_A.*.pt` and `my_data.train_B.*.pt`, and that when building batches, we'll take 1 example from corpus A, then 7 examples from corpus B, and so on. + +**Warning**: This means that we'll load as many shards as we have `-data_ids`, in order to produce batches containing data from every corpus. It may be a good idea to reduce the `-shard_size` at preprocessing. + +## Can I get word alignment while translating? + +### Raw alignments from averaging Transformer attention heads + +Currently, we support producing word alignments while translating for Transformer-based models. Using `-report_align` when calling `translate.py` will output the inferred alignments in Pharaoh format. Those alignments are computed from an argmax on the average of the attention heads of the *second to last* decoder layer. The resulting src-tgt alignment (Pharaoh) will be appended to the translation sentence, separated by ` ||| `. +Note: The *second to last* default behaviour was empirically determined. It is not the same as in the paper (they take the *penultimate* layer), probably because of slight differences in the architecture. + +* alignments use the standard "Pharaoh format", where a pair `i-j` indicates that the ith word of the source language is aligned to the jth word of the target language. +* Example: {'src': 'das stimmt nicht !'; 'output': 'that is not true ! ||| 0-0 0-1 1-2 2-3 1-4 1-5 3-6'} +* Using the `-tgt` option when calling `translate.py`, we output alignments between the source and the gold target rather than the inferred target, assuming we're doing evaluation. +* To convert subword alignments to word alignments, or symmetrize bidirectional alignments, please refer to the [lilt scripts](https://github.com/lilt/alignment-scripts). + +### Supervised learning on a specific head + +The quality of output alignments can be further improved by providing reference alignments while training. This will invoke multi-task learning on translation and alignment. This is an implementation based on the paper [Jointly Learning to Align and Translate with Transformer Models](https://arxiv.org/abs/1909.02074). + +The data need to be preprocessed with the reference alignments in order to learn the supervised task. + +When calling `preprocess.py`, add: + +* `--train_align`: path(s) to the training alignments in Pharaoh format +* `--valid_align`: path to the validation set alignments in Pharaoh format (optional). +The reference alignment file(s) can be generated by [GIZA++](https://github.com/moses-smt/mgiza/) or [fast_align](https://github.com/clab/fast_align). + +Note: There should be no blank lines in the alignment files provided.
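+
+For illustration, reusing the example pair above (`das stimmt nicht !` / `that is not true !`), the corresponding line of an alignment file is simply a whitespace-separated list of `i-j` pairs; a hypothetical, hand-made reference alignment for that pair could look like:
+
+```text
+0-0 1-1 2-2 1-3 3-4
+```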
+ +Options to learn such alignments are: + +* `-lambda_align`: set a value > 0.0 to enable joint alignment training; the paper suggests 0.05; +* `-alignment_layer`: indicates the index of the decoder layer; +* `-alignment_heads`: number of alignment heads for the alignment task - should be set to 1 for the supervised task, and preferably kept to default (or same as `num_heads`) for the average task; +* `-full_context_alignment`: do a full context decoder pass (no future mask) when computing alignments. This will slow down the training (~12% in terms of tok/s) but is beneficial for generating better alignments. diff --git a/docs/source/im2text.md b/docs/source/legacy/im2text.md similarity index 95% rename from docs/source/im2text.md rename to docs/source/legacy/im2text.md index 7f5edab448..7f223f9d16 100644 --- a/docs/source/im2text.md +++ b/docs/source/legacy/im2text.md @@ -1,5 +1,11 @@ # Image to Text +--------- + +**WARNING**: This example is based on the [legacy version of OpenNMT-py](https://github.com/OpenNMT/OpenNMT-py/tree/legacy)! + +--------- + A deep learning-based approach to learning the image-to-text conversion, built on top of the OpenNMT system. It is completely data-driven, hence can be used for a variety of image-to-text problems, such as image captioning, optical character recognition and LaTeX decompilation. Take LaTeX decompilation as an example, given a formula image: diff --git a/docs/source/speech2text.md b/docs/source/legacy/speech2text.md similarity index 94% rename from docs/source/speech2text.md rename to docs/source/legacy/speech2text.md index 1239852061..8484388fbe 100644 --- a/docs/source/speech2text.md +++ b/docs/source/legacy/speech2text.md @@ -1,5 +1,12 @@ # Speech to Text +--------- + +**WARNING**: This example is based on the [legacy version of OpenNMT-py](https://github.com/OpenNMT/OpenNMT-py/tree/legacy)! + +--------- + + A deep learning-based approach to learning the speech-to-text conversion, built on top of the OpenNMT system. Given raw audio, we first apply short-time Fourier transform (STFT), then apply Convolutional Neural Networks to get the source features. Based on this source representation, we use an LSTM decoder with attention to produce the text character by character. diff --git a/docs/source/vid2text.rst b/docs/source/legacy/vid2text.rst similarity index 99% rename from docs/source/vid2text.rst rename to docs/source/legacy/vid2text.rst index 0e591d620e..3b794cf872 100644 --- a/docs/source/vid2text.rst +++ b/docs/source/legacy/vid2text.rst @@ -1,6 +1,15 @@ Video to Text ============= +--------- + +**WARNING**: This example is based on the +`legacy version of OpenNMT-py <https://github.com/OpenNMT/OpenNMT-py/tree/legacy>`_ +! + +--------- + + Recurrent --------- diff --git a/docs/source/main.md b/docs/source/main.md index 4a312840ec..26607943e0 100644 --- a/docs/source/main.md +++ b/docs/source/main.md @@ -1,12 +1,11 @@ # Overview -This portal provides a detailed documentation of the OpenNMT toolkit. It describes how to use the PyTorch project and how it works. +This portal provides detailed documentation of the OpenNMT-py toolkit. It describes how to use the PyTorch project and how it works. ## Installation -Install from `pip`: Install `OpenNMT-py` from `pip`: ```bash pip install OpenNMT-py ``` -*(Optionnal)* some advanced features (e.g.
working pretrained models or specific transforms) require extra packages; you can install them with: ```bash pip install -r requirements.opt.txt ``` -And you are ready to go! Take a look at the [quickstart](quickstart) to familiarize yourself with the main training workflow. +And you are ready to go! -Alternatively you can use Docker to install with `nvidia-docker`. The main Dockerfile is included -in the root directory. +Take a look at the [quickstart](quickstart) to familiarize yourself with the main training workflow. ## Citation -When using OpenNMT for research please cite our +When using OpenNMT-py for research, please cite our [OpenNMT technical report](https://doi.org/10.18653/v1/P17-4012) ``` @@ -53,6 +51,6 @@ When using OpenNMT for research please cite our You can find additional help or tutorials in the following resources: -* [Gitter channel](https://gitter.im/OpenNMT/openmt-py) - * [Forum](http://forum.opennmt.net/) + +* [Gitter channel](https://gitter.im/OpenNMT/openmt-py) diff --git a/docs/source/modules.rst b/docs/source/modules.rst deleted file mode 100644 index e045c01514..0000000000 --- a/docs/source/modules.rst +++ /dev/null @@ -1,7 +0,0 @@ -onmt -==== - -.. toctree:: - :maxdepth: 4 - - onmt diff --git a/docs/source/onmt.inputters.rst b/docs/source/onmt.inputters.rst index a5b0f0815f..99507e296d 100644 --- a/docs/source/onmt.inputters.rst +++ b/docs/source/onmt.inputters.rst @@ -12,12 +12,6 @@ Data Readers .. autoclass:: onmt.inputters.TextDataReader :members: -.. autoclass:: onmt.inputters.ImageDataReader - :members: - -.. autoclass:: onmt.inputters.AudioDataReader - :members: - Dataset -------- diff --git a/docs/source/onmt.modules.rst b/docs/source/onmt.modules.rst index 07764ff4eb..a3ef216ef9 100644 --- a/docs/source/onmt.modules.rst +++ b/docs/source/onmt.modules.rst @@ -95,21 +95,6 @@ Architecture: SRU :members: -Alternative Encoders -------------------- - -onmt\.modules\.AudioEncoder - -.. autoclass:: onmt.encoders.AudioEncoder - :members: - - -onmt\.modules\.ImageEncoder - -.. autoclass:: onmt.encoders.ImageEncoder - :members: - - Copy Attention -------------- diff --git a/docs/source/onmt.rst b/docs/source/onmt.rst index fa44bcc6c0..5ae056ce09 100644 --- a/docs/source/onmt.rst +++ b/docs/source/onmt.rst @@ -26,7 +26,7 @@ Loss Optimizer ------ +--------- .. autoclass:: onmt.utils.Optimizer :members: diff --git a/docs/source/options/build_vocab.rst b/docs/source/options/build_vocab.rst new file mode 100644 index 0000000000..cdab43a916 --- /dev/null +++ b/docs/source/options/build_vocab.rst @@ -0,0 +1,7 @@ +Build Vocab +=========== + +.. argparse:: + :filename: ../onmt/bin/build_vocab.py + :func: _get_parser + :prog: build_vocab.py \ No newline at end of file diff --git a/docs/source/options/preprocess.rst b/docs/source/options/preprocess.rst deleted file mode 100644 index d7ccc66e8b..0000000000 --- a/docs/source/options/preprocess.rst +++ /dev/null @@ -1,7 +0,0 @@ -Preprocess -========== - -..
argparse:: - :filename: ../onmt/bin/preprocess.py - :func: _get_parser - :prog: preprocess.py \ No newline at end of file diff --git a/docs/source/quickstart.md b/docs/source/quickstart.md index d1e24adecf..134f97f8be 100644 --- a/docs/source/quickstart.md +++ b/docs/source/quickstart.md @@ -2,14 +2,22 @@ # Quickstart - -### Step 1: Preprocess the data +### Step 0: Install OpenNMT-py ```bash -onmt_preprocess -train_src data/src-train.txt -train_tgt data/tgt-train.txt -valid_src data/src-val.txt -valid_tgt data/tgt-val.txt -save_data data/demo +pip install --upgrade pip +pip install OpenNMT-py ``` -We will be working with some example data in `data/` folder. +### Step 1: Prepare the data + +To get started, we propose to download a toy English-German dataset for machine translation containing 10k tokenized sentences: + +```bash +wget https://s3.amazonaws.com/opennmt-trainingdata/toy-ende.tar.gz +tar xf toy-ende.tar.gz +cd toy-ende +``` The data consists of parallel source (`src`) and target (`tgt`) data containing one sentence per line with tokens separated by a space: @@ -18,37 +26,94 @@ The data consists of parallel source (`src`) and target (`tgt`) data containing * `src-val.txt` * `tgt-val.txt` -Validation files are required and used to evaluate the convergence of the training. It usually contains no more than 5000 sentences. +Validation files are used to evaluate the convergence of the training. They usually contain no more than 5k sentences. ```text -$ head -n 3 data/src-train.txt +$ head -n 3 toy-ende/src-train.txt It is not acceptable that , with the help of the national bureaucracies , Parliament 's legislative prerogative should be made null and void by means of implementing provisions whose content , purpose and extent are not laid down in advance . Federal Master Trainer and Senior Instructor of the Italian Federation of Aerobic Fitness , Group Fitness , Postural Gym , Stretching and Pilates; from 2004 , he has been collaborating with Antiche Terme as personal Trainer and Instructor of Stretching , Pilates and Postural Gym . " Two soldiers came up to me and told me that if I refuse to sleep with them , they will kill me . They beat me and ripped my clothes . ``` +We need to build a **YAML configuration file** to specify the data that will be used: + +```yaml +# toy_en_de.yaml + +## Where the vocab(s) will be written +save_data: toy-ende/run/example +# Prevent overwriting existing files in the folder +overwrite: False + +# Corpus opts: +data: + corpus_1: + path_src: toy-ende/src-train.txt + path_tgt: toy-ende/tgt-train.txt + valid: + path_src: toy-ende/src-val.txt + path_tgt: toy-ende/tgt-val.txt +... + +``` + +From this configuration, we can build the vocab(s) that will be necessary to train the model: +```bash +onmt_build_vocab -config toy_en_de.yaml -n_sample 10000 +``` + +**Notes**: +- `-n_sample` is required here -- it represents the number of lines sampled from each corpus to build the vocab. +- This configuration is the simplest possible, without any tokenization or other *transforms*. See [other example configurations](https://github.com/OpenNMT/OpenNMT-py/tree/master/config) for more complex pipelines. + + ### Step 2: Train the model +To train a model, we need to **add the following to the YAML configuration file**: +- the vocabulary path(s) that will be used; +- training-specific parameters. + +```yaml +# toy_en_de.yaml + +...
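+# (the "..." above stands for the save_data and corpus options already
+# defined in Step 1, which stay in the same file)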
 + +# Vocabulary files that were just created +src_vocab: toy-ende/run/example.vocab.src +tgt_vocab: toy-ende/run/example.vocab.tgt + +# Train on a single GPU +world_size: 1 +gpu_ranks: [0] + +# Where to save the checkpoints +save_model: toy-ende/run/model +save_checkpoint_steps: 500 +train_steps: 1000 +valid_steps: 500 + +``` + +Then you can simply run: + ```bash -onmt_train -data data/demo -save_model demo-model +onmt_train -config toy_en_de.yaml ``` -The main train command is quite simple. Minimally it takes a data file -and a save file. This will run the default model, which consists of a -2-layer LSTM with 500 hidden units on both the encoder/decoder. -If you want to train on GPU, you need to set, as an example: -CUDA_VISIBLE_DEVICES=1,3 -`-world_size 2 -gpu_ranks 0 1` to use (say) GPU 1 and 3 on this node only. -To know more about distributed training on single or multi nodes, read the FAQ section. +This configuration will run the default model, which consists of a 2-layer LSTM with 500 hidden units on both the encoder and decoder. It will run on a single GPU (`world_size 1` & `gpu_ranks [0]`). + +Before the training process actually starts, the `*.vocab.pt` and `*.transforms.pt` files can be dumped to `-save_data`, with the configuration specified in the `-config` YAML file, by enabling the `-dump_fields` and `-dump_transforms` flags. It is also possible to generate transformed samples to simplify any potentially required visual inspection. The number of sample lines to dump per corpus is set with the `-n_sample` flag. + +For more advanced models and parameters, see [other example configurations](https://github.com/OpenNMT/OpenNMT-py/tree/master/config) or the [FAQ](FAQ). ### Step 3: Translate ```bash -onmt_translate -model demo-model_XYZ.pt -src data/src-test.txt -output pred.txt -replace_unk -verbose +onmt_translate -model toy-ende/run/model_step_1000.pt -src toy-ende/src-test.txt -output toy-ende/pred_1000.txt -gpu 0 -verbose ``` -Now you have a model which you can use to predict on new data. We do this by running beam search. This will output predictions into `pred.txt`. +Now you have a model which you can use to predict on new data. We do this by running beam search. This will output predictions into `toy-ende/pred_1000.txt`. -Note: +**Note**: The predictions are going to be quite terrible, as the demo dataset is small. Try running on some larger datasets! For example, you can download millions of parallel sentences for [translation](http://www.statmt.org/wmt16/translation-task.html) or [summarization](https://github.com/harvardnlp/sent-summary). diff --git a/docs/source/refs.bib b/docs/source/refs.bib index e8084a5c05..be85081a28 100644 --- a/docs/source/refs.bib +++ b/docs/source/refs.bib @@ -481,3 +481,21 @@ @inproceedings{DeeperTransformer pages = "1810--1822", abstract = "Transformer is the state-of-the-art model in recent machine translation evaluations. Two strands of research are promising to improve models of this kind: the first uses wide networks (a.k.a. Transformer-Big) and has been the de facto standard for development of the Transformer system, and the other uses deeper language representation but faces the difficulty arising from learning deep networks. Here, we continue the line of research on the latter. We claim that a truly deep Transformer model can surpass the Transformer-Big counterpart by 1) proper use of layer normalization and 2) a novel way of passing the combination of previous layers to the next.
On WMT{'}16 English-German and NIST OpenMT{'}12 Chinese-English tasks, our deep system (30/25-layer encoder) outperforms the shallow Transformer-Big/Base baseline (6-layer encoder) by 0.4-2.4 BLEU points. As another bonus, the deep model is 1.6X smaller in size and 3X faster in training than Transformer-Big.", } + +@article{DBLP:journals/corr/abs-1808-07512, + author = {Xinyi Wang and + Hieu Pham and + Zihang Dai and + Graham Neubig}, + title = {SwitchOut: an Efficient Data Augmentation Algorithm for Neural Machine + Translation}, + journal = {CoRR}, + volume = {abs/1808.07512}, + year = {2018}, + url = {http://arxiv.org/abs/1808.07512}, + archivePrefix = {arXiv}, + eprint = {1808.07512}, + timestamp = {Sun, 02 Sep 2018 15:01:54 +0200}, + biburl = {https://dblp.org/rec/journals/corr/abs-1808-07512.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} diff --git a/examples/cnndm.yaml b/examples/cnndm.yaml new file mode 100644 index 0000000000..57d0812862 --- /dev/null +++ b/examples/cnndm.yaml @@ -0,0 +1,53 @@ +## Where the vocab(s) will be written +save_data: cnndm/run/example +# Prevent overwriting existing files in the folder +overwrite: False + +# filter long examples +src_seq_length: 10000 +tgt_seq_length: 10000 +src_seq_length_trunc: 400 +tgt_seq_length_trunc: 100 + +# common vocabulary for source and target +share_vocab: True + +# Corpus opts: +data: + cnndm: + path_src: cnndm/train.txt.src + path_tgt: cnndm/train.txt.tgt.tagged + transforms: [] + weight: 1 + valid: + path_src: cnndm/val.txt.src + path_tgt: cnndm/val.txt.tgt.tagged + transforms: [] + +src_vocab_size: 50000 +tgt_vocab_size: 50000 + +src_vocab: cnndm/run/example.vocab.src +tgt_vocab: cnndm/run/example.vocab.tgt + +save_model: cnndm/run/model +copy_attn: true +global_attention: mlp +word_vec_size: 128 +rnn_size: 512 +layers: 1 +encoder_type: brnn +train_steps: 200000 +max_grad_norm: 2 +dropout: 0 +batch_size: 16 +valid_batch_size: 16 +optim: adagrad +learning_rate: 0.15 +adagrad_accumulator_init: 0.1 +reuse_copy_attn: true +copy_loss_by_seqlength: true +bridge: true +seed: 777 +world_size: 2 +gpu_ranks: [0, 1] \ No newline at end of file diff --git a/examples/ggnn.yaml b/examples/ggnn.yaml new file mode 100644 index 0000000000..984139c50f --- /dev/null +++ b/examples/ggnn.yaml @@ -0,0 +1,42 @@ +## Where the necessary objects will be written +save_data: OpenNMT-py-ggnn-example/run/example + +# Filter long examples +src_seq_length: 1000 +tgt_seq_length: 30 + +# Data definition +data: + cnndm: + path_src: OpenNMT-py-ggnn-example/src-train.txt + path_tgt: OpenNMT-py-ggnn-example/tgt-train.txt + transforms: [filtertoolong] + weight: 1 + valid: + path_src: OpenNMT-py-ggnn-example/src-val.txt + path_tgt: OpenNMT-py-ggnn-example/tgt-val.txt + +src_vocab: OpenNMT-py-ggnn-example/srcvocab.txt +tgt_vocab: OpenNMT-py-ggnn-example/tgtvocab.txt + +save_model: OpenNMT-py-ggnn-example/run/model + +# Model options +train_steps: 10000 +save_checkpoint_steps: 5000 +encoder_type: ggnn +layers: 2 +decoder_type: rnn +rnn_size: 256 +learning_rate: 0.1 +start_decay_steps: 5000 +learning_rate_decay: 0.8 +global_attention: general +batch_size: 32 +word_vec_size: 256 +bridge: true +gpu_ranks: 0 +n_edge_types: 9 +state_dim: 256 +n_steps: 10 +n_node: 64 \ No newline at end of file diff --git a/examples/onmt.train.fp16.transformer.yaml b/examples/onmt.train.fp16.transformer.yaml new file mode 100644 index 0000000000..abfc735d47 --- /dev/null +++ b/examples/onmt.train.fp16.transformer.yaml @@ -0,0 +1,102 @@ +# Meta opts: +## 
IO +save_data: generated/dynamic.ex0 +overwrite: False + +### vocab: +src_vocab: data/vocab-train.src +tgt_vocab: data/vocab-train.tgt +src_vocab_size: 32000 +tgt_vocab_size: 32000 +vocab_size_multiple: 8 +src_words_min_frequency: 10 +tgt_words_min_frequency: 10 +share_vocab: True + +### Transform related opts: +#### Subword +src_subword_model: examples/subword.spm.model +tgt_subword_model: examples/subword.spm.model +src_subword_nbest: 1 +tgt_subword_nbest: 1 +src_subword_alpha: 0.0 +tgt_subword_alpha: 0.0 +src_subword_type: sentencepiece +tgt_subword_type: sentencepiece +src_onmttok_kwargs: "{'mode': 'aggressive', 'spacer_annotate': True}" +tgt_onmttok_kwargs: "{'mode': 'aggressive', 'spacer_annotate': True}" +#### Sampling +switchout_temperature: 1.0 +tokendrop_temperature: 1.0 +tokenmask_temperature: 1.0 +#### Filter +src_seq_length: 300 +tgt_seq_length: 300 +#### BART +permute_sent_ratio: 0.0 +rotate_ratio: 0.0 +insert_ratio: 0.0 +random_ratio: 0.0 +mask_ratio: 0.0 +mask_length: subword +poisson_lambda: 3.0 +replace_length: 1 + +# Corpus opts: +data: + corpus_1: + path_src: data/src-train.txt + path_tgt: data/tgt-train.txt + transforms: [tokenmask, tokendrop, onmt_tokenize, filtertoolong] + valid: + path_src: data/src-val.txt + path_tgt: data/tgt-val.txt + transforms: [onmt_tokenize] + +# Model configuration +save_model: foo +keep_checkpoint: 50 +save_checkpoint_steps: 4000 +average_decay: 0.0001 +seed: 2345 +report_every: 100 +train_steps: 100000 +valid_steps: 4000 + +queue_size: 10000 +bucket_size: 32768 +world_size: 2 +gpu_ranks: [0, 1] +batch_type: "tokens" +batch_size: 4096 +valid_batch_size: 8 +batch_size_multiple: 1 +max_generator_batches: 0 +accum_count: [3] +accum_steps: [0] + +model_dtype: "fp16" +optim: "fusedadam" +learning_rate: 2 +warmup_steps: 6000 +decay_method: "noam" +adam_beta2: 0.998 +max_grad_norm: 0 +label_smoothing: 0.1 +param_init: 0 +param_init_glorot: true +normalization: "tokens" + +encoder_type: transformer +decoder_type: transformer +enc_layers: 6 +dec_layers: 6 +heads: 8 +rnn_size: 512 +word_vec_size: 512 +transformer_ff: 2048 +dropout_steps: [0] +dropout: [0.1] +attention_dropout: [0.1] +share_decoder_embeddings: true +share_embeddings: true diff --git a/examples/scripts/prepare_wmt_data.sh b/examples/scripts/prepare_wmt_data.sh new file mode 100755 index 0000000000..1194d07b94 --- /dev/null +++ b/examples/scripts/prepare_wmt_data.sh @@ -0,0 +1,158 @@ +#!/bin/bash + +################################################################################## +# The default script downloads the commoncrawl, europarl and newstest2014 and +# newstest2017 datasets. Files that are not English or German are removed in +# this script for tidiness. You may switch datasets out depending on the task. +# (Note that commoncrawl and europarl-v7 are the same for all tasks).
+# http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz +# http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz +# +# WMT14 http://www.statmt.org/wmt14/training-parallel-nc-v9.tgz +# WMT15 http://www.statmt.org/wmt15/training-parallel-nc-v10.tgz +# WMT16 http://data.statmt.org/wmt16/translation-task/training-parallel-nc-v11.tgz +# WMT17 http://data.statmt.org/wmt17/translation-task/training-parallel-nc-v12.tgz +# Note: there are very few differences, but each year added a few sentences +# new WMT17 http://data.statmt.org/wmt17/translation-task/rapid2016.tgz +# +# For WMT16, Rico Sennrich released some news back-translations +# http://data.statmt.org/rsennrich/wmt16_backtranslations/en-de/ +# +# Test sets: http://data.statmt.org/wmt17/translation-task/test.tgz +################################################################################## + +# provide script usage instructions +if [ $# -eq 0 ] +then + echo "usage: $0 <data_path>" + exit 1 +fi + +# set relevant paths +SP_PATH=/usr/local/bin +DATA_PATH=$1 +TEST_PATH=$DATA_PATH/test + +CUR_DIR=$(pwd) + +# set vocabulary size and source and target languages +vocab_size=32000 +sl=en +tl=de + +# Download the default datasets into the $DATA_PATH; mkdir if it doesn't exist +mkdir -p $DATA_PATH +cd $DATA_PATH + +echo "Downloading and extracting Commoncrawl data (919 MB) for training..." +wget --trust-server-names http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz +tar zxvf training-parallel-commoncrawl.tgz +ls | grep -v 'commoncrawl.de-en.[de,en]' | xargs rm + +echo "Downloading and extracting Europarl data (658 MB) for training..." +wget --trust-server-names http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz +tar zxvf training-parallel-europarl-v7.tgz +cd training && ls | grep -v 'europarl-v7.de-en.[de,en]' | xargs rm +cd .. && mv training/europarl* . && rm -r training training-parallel-europarl-v7.tgz + +echo "Downloading and extracting News Commentary data (76 MB) for training..." +wget --trust-server-names http://data.statmt.org/wmt16/translation-task/training-parallel-nc-v11.tgz +tar zxvf training-parallel-nc-v11.tgz +cd training-parallel-nc-v11 && ls | grep -v news-commentary-v11.de-en.[de,en] | xargs rm +cd .. && mv training-parallel-nc-v11/* . && rm -r training-parallel-nc-v11 training-parallel-nc-v11.tgz + +# Validation and test data are put into the $DATA_PATH/test folder +echo "Downloading and extracting newstest2014 data (4 MB) for validation..." +wget --trust-server-names http://www.statmt.org/wmt14/test-filtered.tgz +echo "Downloading and extracting newstest2017 data (5 MB) for testing..." +wget --trust-server-names http://data.statmt.org/wmt17/translation-task/test.tgz +tar zxvf test-filtered.tgz && tar zxvf test.tgz +cd test && ls | grep -v '.*deen\|.*ende' | xargs rm +cd .. && rm test-filtered.tgz test.tgz && cd ..
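+
+# At this point, $DATA_PATH should contain commoncrawl.de-en.{en,de},
+# europarl-v7.de-en.{en,de} and news-commentary-v11.de-en.{en,de}, and the
+# newstest SGM files should sit under $DATA_PATH/test.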
+ +# set training, validation, and test corpuses +corpus[1]=commoncrawl.de-en +corpus[2]=europarl-v7.de-en +corpus[3]=news-commentary-v11.de-en +#corpus[3]=news-commentary-v12.de-en +#corpus[4]=news.bt.en-de +#corpus[5]=rapid2016.de-en + +validset=newstest2014-deen +testset=newstest2017-ende + +cd $CUR_DIR + +# retrieve file preparation from Moses repository +wget -nc \ + https://raw.githubusercontent.com/moses-smt/mosesdecoder/master/scripts/ems/support/input-from-sgm.perl \ + -O $TEST_PATH/input-from-sgm.perl + +################################################################################## +# Starting from here, original files are supposed to be in $DATA_PATH +# a data folder will be created in scripts/wmt +################################################################################## + +export PATH=$SP_PATH:$PATH + +# Data preparation using SentencePiece +# First we concat all the datasets to train the SP model +if false; then + echo "$0: Training sentencepiece model" + rm -f $DATA_PATH/train.txt + for ((i=1; i<= ${#corpus[@]}; i++)) + do + for f in $DATA_PATH/${corpus[$i]}.$sl $DATA_PATH/${corpus[$i]}.$tl + do + cat $f >> $DATA_PATH/train.txt + done + done + spm_train --input=$DATA_PATH/train.txt --model_prefix=$DATA_PATH/wmt$sl$tl \ + --vocab_size=$vocab_size --character_coverage=1 + rm $DATA_PATH/train.txt +fi + +# Second we use the trained model to tokenize all the files +# This is not necessary, as it can be done on the fly in OpenNMT-py 2.0 +# if false; then +# echo "$0: Tokenizing with sentencepiece model" +# rm -f $DATA_PATH/train.txt +# for ((i=1; i<= ${#corpus[@]}; i++)) +# do +# for f in $DATA_PATH/${corpus[$i]}.$sl $DATA_PATH/${corpus[$i]}.$tl +# do +# file=$(basename $f) +# spm_encode --model=$DATA_PATH/wmt$sl$tl.model < $f > $DATA_PATH/$file.sp +# done +# done +# fi + +# We concat the training sets into two (src/tgt) tokenized files +# if false; then +# cat $DATA_PATH/*.$sl.sp > $DATA_PATH/train.$sl +# cat $DATA_PATH/*.$tl.sp > $DATA_PATH/train.$tl +# fi + +# We use the same tokenization method for a valid set (and test set) +# if true; then +# perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$validset-src.$sl.sgm \ +# | spm_encode --model=$DATA_PATH/wmt$sl$tl.model > $DATA_PATH/valid.$sl.sp +# perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$validset-ref.$tl.sgm \ +# | spm_encode --model=$DATA_PATH/wmt$sl$tl.model > $DATA_PATH/valid.$tl.sp +# perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$testset-src.$sl.sgm \ +# | spm_encode --model=$DATA_PATH/wmt$sl$tl.model > $DATA_PATH/test.$sl.sp +# perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$testset-ref.$tl.sgm \ +# | spm_encode --model=$DATA_PATH/wmt$sl$tl.model > $DATA_PATH/test.$tl.sp +# fi + +# Parse the valid and test sets +if true; then + perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$validset-src.$sl.sgm \ + > $DATA_PATH/valid.$sl + perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$validset-ref.$tl.sgm \ + > $DATA_PATH/valid.$tl + perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$testset-src.$sl.sgm \ + > $DATA_PATH/test.$sl + perl $TEST_PATH/input-from-sgm.perl < $TEST_PATH/$testset-ref.$tl.sgm \ + > $DATA_PATH/test.$tl +fi diff --git a/examples/wmt14_en_de.yaml b/examples/wmt14_en_de.yaml new file mode 100644 index 0000000000..573108c927 --- /dev/null +++ b/examples/wmt14_en_de.yaml @@ -0,0 +1,108 @@ +# wmt14_en_de.yaml +save_data: data/wmt/run/example + +# Corpus opts: +data: + commoncrawl: + path_src: data/wmt/commoncrawl.de-en.en + path_tgt: data/wmt/commoncrawl.de-en.de + transforms: [sentencepiece, 
filtertoolong] + weight: 23 + europarl: + path_src: data/wmt/europarl-v7.de-en.en + path_tgt: data/wmt/europarl-v7.de-en.de + transforms: [sentencepiece, filtertoolong] + weight: 19 + news_commentary: + path_src: data/wmt/news-commentary-v11.de-en.en + path_tgt: data/wmt/news-commentary-v11.de-en.de + transforms: [sentencepiece, filtertoolong] + weight: 3 + valid: + path_src: data/wmt/valid.en + path_tgt: data/wmt/valid.de + transforms: [sentencepiece] + +### Transform related opts: +#### Subword +src_subword_model: data/wmt/wmtende.model +tgt_subword_model: data/wmt/wmtende.model +# src_subword_type: sentencepiece +# tgt_subword_type: sentencepiece +# onmttok_kwargs: "{'mode': 'none', 'spacer_annotate': True}" + +subword_nbest: 1 +subword_alpha: 0.0 +#### Filter +src_seq_length: 150 +tgt_seq_length: 150 + +# silently ignore empty lines in the data +skip_empty_level: silent + + +# # Vocab opts +# ### vocab: +src_vocab: data/wmt/run/example.vocab.src +tgt_vocab: data/wmt/run/example.vocab.tgt +src_vocab_size: 32000 +tgt_vocab_size: 32000 +vocab_size_multiple: 8 +src_words_min_frequency: 1 +tgt_words_min_frequency: 1 +share_vocab: True + +# # Model training parameters + +# General opts +save_model: data/wmt/run/model +keep_checkpoint: 50 +save_checkpoint_steps: 5000 +average_decay: 0.0005 +seed: 1234 +report_every: 100 +train_steps: 100000 +valid_steps: 5000 + +# Batching +queue_size: 1024 +bucket_size: 32768 +pool_factor: 8192 +world_size: 2 +gpu_ranks: [0, 1] +batch_type: "tokens" +batch_size: 4096 +valid_batch_size: 16 +batch_size_multiple: 1 +max_generator_batches: 0 +accum_count: [3] +accum_steps: [0] + +# Optimization +model_dtype: "fp32" +optim: "adam" +learning_rate: 2 +warmup_steps: 6000 +decay_method: "noam" +adam_beta2: 0.998 +max_grad_norm: 0 +label_smoothing: 0.1 +param_init: 0 +param_init_glorot: true +normalization: "tokens" + +# Model +encoder_type: transformer +decoder_type: transformer +enc_layers: 6 +dec_layers: 6 +heads: 8 +rnn_size: 512 +word_vec_size: 512 +transformer_ff: 2048 +dropout_steps: [0] +dropout: [0.1] +attention_dropout: [0.1] +share_decoder_embeddings: true +share_embeddings: true +position_encoding: true diff --git a/onmt/bin/build_vocab.py b/onmt/bin/build_vocab.py new file mode 100644 index 0000000000..c7b4fbaf06 --- /dev/null +++ b/onmt/bin/build_vocab.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +"""Get vocabulary counts from transformed corpora samples.""" +from onmt.utils.logging import init_logger +from onmt.utils.misc import set_random_seed +from onmt.utils.parse import ArgumentParser +from onmt.opts import dynamic_prepare_opts +from onmt.inputters.corpus import save_transformed_sample +from onmt.transforms import make_transforms, get_transforms_cls + + +def build_vocab_main(opts): + """Apply transforms to samples of specified data and build vocab from it. + + Transforms that need a vocab will be disabled in this process. + Built vocab is saved in plain text format as follows and can be passed as + `-src_vocab` (and `-tgt_vocab`) when training: + ``` + <tok>\t<count> + <tok>\t<count> + ``` + """ + + ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True) + assert opts.n_sample == -1 or opts.n_sample > 1, \ + f"Illegal argument n_sample={opts.n_sample}."
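+    # n_sample == -1 means "build the vocabulary over the full corpora";
+    # any other accepted value (> 1) counts over that many transformed
+    # lines per corpus.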
+ + logger = init_logger() + set_random_seed(opts.seed, False) + transforms_cls = get_transforms_cls(opts._all_transform) + fields = None + + transforms = make_transforms(opts, transforms_cls, fields) + + logger.info(f"Counter vocab from {opts.n_sample} samples.") + src_counter, tgt_counter = save_transformed_sample( + opts, transforms, n_sample=opts.n_sample, build_vocab=True) + + logger.info(f"Counters src:{len(src_counter)}") + logger.info(f"Counters tgt:{len(tgt_counter)}") + if opts.share_vocab: + src_counter += tgt_counter + tgt_counter = src_counter + logger.info(f"Counters after share:{len(src_counter)}") + + def save_counter(counter, save_path): + with open(save_path, "w") as fo: + for tok, count in counter.most_common(): + fo.write(tok + "\t" + str(count) + "\n") + + save_counter(src_counter, opts.save_data + '.vocab.src') + save_counter(tgt_counter, opts.save_data + '.vocab.tgt') + + +def _get_parser(): + parser = ArgumentParser(description='build_vocab.py') + dynamic_prepare_opts(parser, build_vocab_only=True) + return parser + + +def main(): + parser = _get_parser() + opts, unknown = parser.parse_known_args() + build_vocab_main(opts) + + +if __name__ == '__main__': + main() diff --git a/onmt/bin/preprocess.py b/onmt/bin/preprocess.py deleted file mode 100755 index e949f32bfe..0000000000 --- a/onmt/bin/preprocess.py +++ /dev/null @@ -1,322 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Pre-process Data / features files and build vocabulary -""" -import codecs -import glob -import gc -import torch -from collections import Counter, defaultdict - -from onmt.utils.logging import init_logger, logger -from onmt.utils.misc import split_corpus -import onmt.inputters as inputters -import onmt.opts as opts -from onmt.utils.parse import ArgumentParser -from onmt.inputters.inputter import _build_fields_vocab,\ - _load_vocab, \ - old_style_vocab, \ - load_old_vocab - -from functools import partial -from multiprocessing import Pool - - -def check_existing_pt_files(opt, corpus_type, ids, existing_fields): - """ Check if there are existing .pt files to avoid overwriting them """ - existing_shards = [] - for maybe_id in ids: - if maybe_id: - shard_base = corpus_type + "_" + maybe_id - else: - shard_base = corpus_type - pattern = opt.save_data + '.{}.*.pt'.format(shard_base) - if glob.glob(pattern): - if opt.overwrite: - maybe_overwrite = ("will be overwritten because " - "`-overwrite` option is set.") - else: - maybe_overwrite = ("won't be overwritten, pass the " - "`-overwrite` option if you want to.") - logger.warning("Shards for corpus {} already exist, {}" - .format(shard_base, maybe_overwrite)) - existing_shards += [maybe_id] - return existing_shards - - -def process_one_shard(corpus_params, params): - corpus_type, fields, src_reader, tgt_reader, align_reader, opt,\ - existing_fields, src_vocab, tgt_vocab = corpus_params - i, (src_shard, tgt_shard, align_shard, maybe_id, filter_pred) = params - # create one counter per shard - sub_sub_counter = defaultdict(Counter) - assert len(src_shard) == len(tgt_shard) - logger.info("Building shard %d." 
% i) - - src_data = {"reader": src_reader, "data": src_shard, "dir": opt.src_dir} - tgt_data = {"reader": tgt_reader, "data": tgt_shard, "dir": None} - align_data = {"reader": align_reader, "data": align_shard, "dir": None} - _readers, _data, _dir = inputters.Dataset.config( - [('src', src_data), ('tgt', tgt_data), ('align', align_data)]) - - dataset = inputters.Dataset( - fields, readers=_readers, data=_data, dirs=_dir, - sort_key=inputters.str2sortkey[opt.data_type], - filter_pred=filter_pred, - corpus_id=maybe_id - ) - if corpus_type == "train" and existing_fields is None: - for ex in dataset.examples: - sub_sub_counter['corpus_id'].update( - ["train" if maybe_id is None else maybe_id]) - for name, field in fields.items(): - if (opt.data_type in ["audio", "vec"]) and name == "src": - continue - try: - f_iter = iter(field) - except TypeError: - f_iter = [(name, field)] - all_data = [getattr(ex, name, None)] - else: - all_data = getattr(ex, name) - for (sub_n, sub_f), fd in zip( - f_iter, all_data): - has_vocab = (sub_n == 'src' and - src_vocab is not None) or \ - (sub_n == 'tgt' and - tgt_vocab is not None) - if (hasattr(sub_f, 'sequential') - and sub_f.sequential and not has_vocab): - val = fd - sub_sub_counter[sub_n].update(val) - if maybe_id: - shard_base = corpus_type + "_" + maybe_id - else: - shard_base = corpus_type - data_path = "{:s}.{:s}.{:d}.pt".\ - format(opt.save_data, shard_base, i) - - logger.info(" * saving %sth %s data shard to %s." - % (i, shard_base, data_path)) - - dataset.save(data_path) - - del dataset.examples - gc.collect() - del dataset - gc.collect() - - return sub_sub_counter - - -def maybe_load_vocab(corpus_type, counters, opt): - src_vocab = None - tgt_vocab = None - existing_fields = None - if corpus_type == "train": - if opt.src_vocab != "": - try: - logger.info("Using existing vocabulary...") - existing_fields = torch.load(opt.src_vocab) - except torch.serialization.pickle.UnpicklingError: - logger.info("Building vocab from text file...") - src_vocab, src_vocab_size = _load_vocab( - opt.src_vocab, "src", counters, - opt.src_words_min_frequency) - if opt.tgt_vocab != "": - tgt_vocab, tgt_vocab_size = _load_vocab( - opt.tgt_vocab, "tgt", counters, - opt.tgt_words_min_frequency) - return src_vocab, tgt_vocab, existing_fields - - -def build_save_dataset(corpus_type, fields, src_reader, tgt_reader, - align_reader, opt): - assert corpus_type in ['train', 'valid'] - - if corpus_type == 'train': - counters = defaultdict(Counter) - srcs = opt.train_src - tgts = opt.train_tgt - ids = opt.train_ids - aligns = opt.train_align - elif corpus_type == 'valid': - counters = None - srcs = [opt.valid_src] - tgts = [opt.valid_tgt] - ids = [None] - aligns = [opt.valid_align] - - src_vocab, tgt_vocab, existing_fields = maybe_load_vocab( - corpus_type, counters, opt) - - existing_shards = check_existing_pt_files( - opt, corpus_type, ids, existing_fields) - - # every corpus has shards, no new one - if existing_shards == ids and not opt.overwrite: - return - - def shard_iterator(srcs, tgts, ids, aligns, existing_shards, - existing_fields, corpus_type, opt): - """ - Builds a single iterator yielding every shard of every corpus. 
- """ - for src, tgt, maybe_id, maybe_align in zip(srcs, tgts, ids, aligns): - if maybe_id in existing_shards: - if opt.overwrite: - logger.warning("Overwrite shards for corpus {}" - .format(maybe_id)) - else: - if corpus_type == "train": - assert existing_fields is not None,\ - ("A 'vocab.pt' file should be passed to " - "`-src_vocab` when adding a corpus to " - "a set of already existing shards.") - logger.warning("Ignore corpus {} because " - "shards already exist" - .format(maybe_id)) - continue - if ((corpus_type == "train" or opt.filter_valid) - and tgt is not None): - filter_pred = partial( - inputters.filter_example, - use_src_len=opt.data_type == "text", - max_src_len=opt.src_seq_length, - max_tgt_len=opt.tgt_seq_length) - else: - filter_pred = None - src_shards = split_corpus(src, opt.shard_size) - tgt_shards = split_corpus(tgt, opt.shard_size) - align_shards = split_corpus(maybe_align, opt.shard_size) - for i, (ss, ts, a_s) in enumerate( - zip(src_shards, tgt_shards, align_shards)): - yield (i, (ss, ts, a_s, maybe_id, filter_pred)) - - shard_iter = shard_iterator(srcs, tgts, ids, aligns, existing_shards, - existing_fields, corpus_type, opt) - - with Pool(opt.num_threads) as p: - dataset_params = (corpus_type, fields, src_reader, tgt_reader, - align_reader, opt, existing_fields, - src_vocab, tgt_vocab) - func = partial(process_one_shard, dataset_params) - for sub_counter in p.imap(func, shard_iter): - if sub_counter is not None: - for key, value in sub_counter.items(): - counters[key].update(value) - - if corpus_type == "train": - vocab_path = opt.save_data + '.vocab.pt' - new_fields = _build_fields_vocab( - fields, counters, opt.data_type, - opt.share_vocab, opt.vocab_size_multiple, - opt.src_vocab_size, opt.src_words_min_frequency, - opt.tgt_vocab_size, opt.tgt_words_min_frequency, - subword_prefix=opt.subword_prefix, - subword_prefix_is_joiner=opt.subword_prefix_is_joiner) - if existing_fields is None: - fields = new_fields - else: - fields = existing_fields - - if old_style_vocab(fields): - fields = load_old_vocab( - fields, opt.data_type, dynamic_dict=opt.dynamic_dict) - - # patch corpus_id - if fields.get("corpus_id", False): - fields["corpus_id"].vocab = new_fields["corpus_id"].vocab_cls( - counters["corpus_id"]) - - torch.save(fields, vocab_path) - - -def build_save_vocab(train_dataset, fields, opt): - fields = inputters.build_vocab( - train_dataset, fields, opt.data_type, opt.share_vocab, - opt.src_vocab, opt.src_vocab_size, opt.src_words_min_frequency, - opt.tgt_vocab, opt.tgt_vocab_size, opt.tgt_words_min_frequency, - vocab_size_multiple=opt.vocab_size_multiple - ) - vocab_path = opt.save_data + '.vocab.pt' - torch.save(fields, vocab_path) - - -def count_features(path): - """ - path: location of a corpus file with whitespace-delimited tokens and - │-delimited features within the token - returns: the number of features in the dataset - """ - with codecs.open(path, "r", "utf-8") as f: - first_tok = f.readline().split(None, 1)[0] - return len(first_tok.split(u"│")) - 1 - - -def preprocess(opt): - ArgumentParser.validate_preprocess_args(opt) - torch.manual_seed(opt.seed) - - init_logger(opt.log_file) - - logger.info("Extracting features...") - - src_nfeats = 0 - tgt_nfeats = 0 - src_nfeats = count_features(opt.train_src[0]) if opt.data_type == 'text' \ - else 0 - tgt_nfeats = count_features(opt.train_tgt[0]) # tgt always text so far - if len(opt.train_src) > 1 and opt.data_type == 'text': - for src, tgt in zip(opt.train_src[1:], opt.train_tgt[1:]): - assert src_nfeats == 
count_features(src),\ - "%s seems to mismatch features of "\ - "the other source datasets" % src - assert tgt_nfeats == count_features(tgt),\ - "%s seems to mismatch features of "\ - "the other target datasets" % tgt - logger.info(" * number of source features: %d." % src_nfeats) - logger.info(" * number of target features: %d." % tgt_nfeats) - - logger.info("Building `Fields` object...") - fields = inputters.get_fields( - opt.data_type, - src_nfeats, - tgt_nfeats, - dynamic_dict=opt.dynamic_dict, - with_align=opt.train_align[0] is not None, - src_truncate=opt.src_seq_length_trunc, - tgt_truncate=opt.tgt_seq_length_trunc) - - src_reader = inputters.str2reader[opt.data_type].from_opt(opt) - tgt_reader = inputters.str2reader["text"].from_opt(opt) - align_reader = inputters.str2reader["text"].from_opt(opt) - - logger.info("Building & saving training data...") - build_save_dataset( - 'train', fields, src_reader, tgt_reader, align_reader, opt) - - if opt.valid_src and opt.valid_tgt: - logger.info("Building & saving validation data...") - build_save_dataset( - 'valid', fields, src_reader, tgt_reader, align_reader, opt) - - -def _get_parser(): - parser = ArgumentParser(description='preprocess.py') - - opts.config_opts(parser) - opts.preprocess_opts(parser) - return parser - - -def main(): - parser = _get_parser() - - opt = parser.parse_args() - preprocess(opt) - - -if __name__ == "__main__": - main() diff --git a/onmt/bin/train.py b/onmt/bin/train.py old mode 100755 new mode 100644 index c6122c369a..b31d8105d0 --- a/onmt/bin/train.py +++ b/onmt/bin/train.py @@ -1,70 +1,116 @@ #!/usr/bin/env python -"""Train models.""" -import os -import signal +"""Train models with dynamic data.""" +import sys import torch +from functools import partial -import onmt.opts as opts -import onmt.utils.distributed - +# import onmt.opts as opts +from onmt.utils.distributed import ErrorHandler, consumer, batch_producer from onmt.utils.misc import set_random_seed +from onmt.modules.embeddings import prepare_pretrained_embeddings from onmt.utils.logging import init_logger, logger -from onmt.train_single import main as single_main + +from onmt.models.model_saver import load_checkpoint +from onmt.train_single import main as single_main, _build_train_iter + from onmt.utils.parse import ArgumentParser -from onmt.inputters.inputter import build_dataset_iter, patch_fields, \ - load_old_vocab, old_style_vocab, build_dataset_iter_multiple +from onmt.opts import train_opts +from onmt.inputters.corpus import save_transformed_sample +from onmt.inputters.fields import build_dynamic_fields, save_fields, \ + load_fields +from onmt.transforms import make_transforms, save_transforms, \ + get_specials, get_transforms_cls + +# Set sharing strategy manually instead of default based on the OS. 
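+# ('file_system' backs shared tensors with files rather than file descriptors,
+# which avoids "too many open files" errors when many batches are exchanged
+# between the producer and consumer processes)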
+torch.multiprocessing.set_sharing_strategy('file_system') -from itertools import cycle +def prepare_fields_transforms(opt): + """Prepare or dump fields & transforms before training.""" + transforms_cls = get_transforms_cls(opt._all_transform) + specials = get_specials(opt, transforms_cls) -# Fix CPU tensor sharing strategy -torch.multiprocessing.set_sharing_strategy('file_system') + fields = build_dynamic_fields( + opt, src_specials=specials['src'], tgt_specials=specials['tgt']) + # maybe prepare pretrained embeddings, if any + prepare_pretrained_embeddings(opt, fields) -def train(opt): - ArgumentParser.validate_train_opts(opt) - ArgumentParser.update_model_opts(opt) - ArgumentParser.validate_model_opts(opt) + if opt.dump_fields: + save_fields(fields, opt.save_data, overwrite=opt.overwrite) + if opt.dump_transforms or opt.n_sample != 0: + transforms = make_transforms(opt, transforms_cls, fields) + if opt.dump_transforms: + save_transforms(transforms, opt.save_data, overwrite=opt.overwrite) + if opt.n_sample != 0: + logger.warning( + "`-n_sample` != 0: Training will not be started. " + f"Stopping after saving {opt.n_sample} samples/corpus.") + save_transformed_sample(opt, transforms, n_sample=opt.n_sample) + logger.info( + "Sample saved, please check it before restarting training.") + sys.exit() + return fields, transforms_cls - set_random_seed(opt.seed, False) - # Load checkpoint if we resume from a previous training. +def _init_train(opt): + """Common initialization stuff for all training processes.""" + ArgumentParser.validate_prepare_opts(opt) + if opt.train_from: - logger.info('Loading checkpoint from %s' % opt.train_from) - checkpoint = torch.load(opt.train_from, - map_location=lambda storage, loc: storage) - logger.info('Loading vocab from checkpoint at %s.' % opt.train_from) - vocab = checkpoint['vocab'] + # Load checkpoint if we resume from a previous training. + checkpoint = load_checkpoint(ckpt_path=opt.train_from) + fields = load_fields(opt.save_data, checkpoint) + transforms_cls = get_transforms_cls(opt._all_transform) + if (hasattr(checkpoint["opt"], '_all_transform') and + len(opt._all_transform.symmetric_difference( + checkpoint["opt"]._all_transform)) != 0): + _msg = "configured transforms differ from checkpoint:" + new_transf = opt._all_transform.difference( + checkpoint["opt"]._all_transform) + old_transf = checkpoint["opt"]._all_transform.difference( + opt._all_transform) + if len(new_transf) != 0: + _msg += f" +{new_transf}" + if len(old_transf) != 0: + _msg += f" -{old_transf}."
+ logger.warning(_msg) else: - vocab = torch.load(opt.data + '.vocab.pt') + checkpoint = None + fields, transforms_cls = prepare_fields_transforms(opt) + + # Report src and tgt vocab sizes + for side in ['src', 'tgt']: + f = fields[side] + try: + f_iter = iter(f) + except TypeError: + f_iter = [(side, f)] + for sn, sf in f_iter: + if sf.use_vocab: + logger.info(' * %s vocab size = %d' % (sn, len(sf.vocab))) + return checkpoint, fields, transforms_cls - # check for code where vocab is saved instead of fields - # (in the future this will be done in a smarter way) - if old_style_vocab(vocab): - fields = load_old_vocab( - vocab, opt.model_type, dynamic_dict=opt.copy_attn) - else: - fields = vocab - # patch for fields that may be missing in old data/model - patch_fields(opt, fields) +def train(opt): + init_logger(opt.log_file) + ArgumentParser.validate_train_opts(opt) + ArgumentParser.update_model_opts(opt) + ArgumentParser.validate_model_opts(opt) - if len(opt.data_ids) > 1: - train_shards = [] - for train_id in opt.data_ids: - shard_base = "train_" + train_id - train_shards.append(shard_base) - train_iter = build_dataset_iter_multiple(train_shards, fields, opt) - else: - if opt.data_ids[0] is not None: - shard_base = "train_" + opt.data_ids[0] - else: - shard_base = "train" - train_iter = build_dataset_iter(shard_base, fields, opt) + set_random_seed(opt.seed, False) + + checkpoint, fields, transforms_cls = _init_train(opt) + train_process = partial( + single_main, + fields=fields, + transforms_cls=transforms_cls, + checkpoint=checkpoint) nb_gpu = len(opt.gpu_ranks) if opt.world_size > 1: + queues = [] mp = torch.multiprocessing.get_context('spawn') semaphore = mp.Semaphore(opt.world_size * opt.queue_size) @@ -76,124 +122,50 @@ def train(opt): for device_id in range(nb_gpu): q = mp.Queue(opt.queue_size) queues += [q] - procs.append(mp.Process(target=run, args=( - opt, device_id, error_queue, q, semaphore), daemon=True)) + procs.append(mp.Process(target=consumer, args=( + train_process, opt, device_id, error_queue, q, semaphore), + daemon=True)) procs[device_id].start() logger.info(" Starting process pid: %d " % procs[device_id].pid) error_handler.add_child(procs[device_id].pid) - producer = mp.Process(target=batch_producer, - args=(train_iter, queues, semaphore, opt,), - daemon=True) - producer.start() - error_handler.add_child(producer.pid) + producers = [] + # This does not work if we merge with the first loop, not sure why + for device_id in range(nb_gpu): + # Get the iterator to generate from + train_iter = _build_train_iter( + opt, fields, transforms_cls, stride=nb_gpu, offset=device_id) + producer = mp.Process(target=batch_producer, + args=(train_iter, queues[device_id], + semaphore, opt,), + daemon=True) + producers.append(producer) + producers[device_id].start() + logger.info(" Starting producer process pid: {} ".format( + producers[device_id].pid)) + error_handler.add_child(producers[device_id].pid) for p in procs: p.join() - producer.terminate() + # Once training is done, we can terminate the producers + for p in producers: + p.terminate() elif nb_gpu == 1: # case 1 GPU only - single_main(opt, 0) + train_process(opt, device_id=0) else: # case only CPU - single_main(opt, -1) - - -def batch_producer(generator_to_serve, queues, semaphore, opt): - init_logger(opt.log_file) - set_random_seed(opt.seed, False) - # generator_to_serve = iter(generator_to_serve) - - def pred(x): - """ - Filters batches that belong only - to gpu_ranks of current node - """ - for rank in opt.gpu_ranks: - if x[0] 
% opt.world_size == rank:
-                return True
-
-    generator_to_serve = filter(
-        pred, enumerate(generator_to_serve))
-
-    def next_batch(device_id):
-        new_batch = next(generator_to_serve)
-        semaphore.acquire()
-        return new_batch[1]
-
-    b = next_batch(0)
-
-    for device_id, q in cycle(enumerate(queues)):
-        b.dataset = None
-        # hack to dodge unpicklable `dict_keys`
-        b.fields = list(b.fields)
-        q.put(b)
-        b = next_batch(device_id)
-
-
-def run(opt, device_id, error_queue, batch_queue, semaphore):
-    """ run process """
-    try:
-        gpu_rank = onmt.utils.distributed.multi_init(opt, device_id)
-        if gpu_rank != opt.gpu_ranks[device_id]:
-            raise AssertionError("An error occurred in \
-                  Distributed initialization")
-        single_main(opt, device_id, batch_queue, semaphore)
-    except KeyboardInterrupt:
-        pass  # killed by parent, do nothing
-    except Exception:
-        # propagate exception to parent process, keeping original traceback
-        import traceback
-        error_queue.put((opt.gpu_ranks[device_id], traceback.format_exc()))
-
-
-class ErrorHandler(object):
-    """A class that listens for exceptions in children processes and propagates
-    the tracebacks to the parent process."""
-
-    def __init__(self, error_queue):
-        """ init error handler """
-        import signal
-        import threading
-        self.error_queue = error_queue
-        self.children_pids = []
-        self.error_thread = threading.Thread(
-            target=self.error_listener, daemon=True)
-        self.error_thread.start()
-        signal.signal(signal.SIGUSR1, self.signal_handler)
-
-    def add_child(self, pid):
-        """ error handler """
-        self.children_pids.append(pid)
-
-    def error_listener(self):
-        """ error listener """
-        (rank, original_trace) = self.error_queue.get()
-        self.error_queue.put((rank, original_trace))
-        os.kill(os.getpid(), signal.SIGUSR1)
-
-    def signal_handler(self, signalnum, stackframe):
-        """ signal handler """
-        for pid in self.children_pids:
-            os.kill(pid, signal.SIGINT)  # kill children processes
-        (rank, original_trace) = self.error_queue.get()
-        msg = """\n\n-- Tracebacks above this line can probably
-                 be ignored --\n\n"""
-        msg += original_trace
-        raise Exception(msg)
+        train_process(opt, device_id=-1)


def _get_parser():
    parser = ArgumentParser(description='train.py')
-
-    opts.config_opts(parser)
-    opts.model_opts(parser)
-    opts.train_opts(parser)
+    train_opts(parser)
    return parser


def main():
    parser = _get_parser()
-    opt = parser.parse_args()
+    opt, unknown = parser.parse_known_args()
    train(opt)
diff --git a/onmt/bin/translate.py b/onmt/bin/translate.py
index f81742ca02..49bdb496d1 100755
--- a/onmt/bin/translate.py
+++ b/onmt/bin/translate.py
@@ -25,7 +25,6 @@ def translate(opt):
    translator.translate(
        src=src_shard,
        tgt=tgt_shard,
-        src_dir=opt.src_dir,
        batch_size=opt.batch_size,
        batch_type=opt.batch_type,
        attn_debug=opt.attn_debug,
diff --git a/onmt/constants.py b/onmt/constants.py
new file mode 100644
index 0000000000..d1c21a8290
--- /dev/null
+++ b/onmt/constants.py
@@ -0,0 +1,24 @@
+"""Define constant values used across the project."""
+
+
+class DefaultTokens(object):
+    PAD = '<blank>'
+    BOS = '<s>'
+    EOS = '</s>'
+    UNK = '<unk>'
+    MASK = '<mask>'
+    VOCAB_PAD = 'averyunlikelytoken'
+    SENT_FULL_STOPS = [".", "?", "!"]
+    PHRASE_TABLE_SEPARATOR = '|||'
+    ALIGNMENT_SEPARATOR = ' ||| '
+
+
+class CorpusName(object):
+    VALID = 'valid'
+    TRAIN = 'train'
+    SAMPLE = 'sample'
+
+
+class SubwordMarker(object):
+    SPACER = '▁'
+    JOINER = '■'
diff --git a/onmt/encoders/__init__.py b/onmt/encoders/__init__.py
index 1d273801d2..d8070817b5 100644
--- a/onmt/encoders/__init__.py
+++ b/onmt/encoders/__init__.py
@@ -5,13 
+5,11 @@ from onmt.encoders.rnn_encoder import RNNEncoder from onmt.encoders.cnn_encoder import CNNEncoder from onmt.encoders.mean_encoder import MeanEncoder -from onmt.encoders.audio_encoder import AudioEncoder -from onmt.encoders.image_encoder import ImageEncoder str2enc = {"ggnn": GGNNEncoder, "rnn": RNNEncoder, "brnn": RNNEncoder, "cnn": CNNEncoder, "transformer": TransformerEncoder, - "img": ImageEncoder, "audio": AudioEncoder, "mean": MeanEncoder} + "mean": MeanEncoder} __all__ = ["EncoderBase", "TransformerEncoder", "RNNEncoder", "CNNEncoder", "MeanEncoder", "str2enc"] diff --git a/onmt/encoders/audio_encoder.py b/onmt/encoders/audio_encoder.py deleted file mode 100644 index 373f7317f7..0000000000 --- a/onmt/encoders/audio_encoder.py +++ /dev/null @@ -1,146 +0,0 @@ -"""Audio encoder""" -import math - -import torch.nn as nn - -from torch.nn.utils.rnn import pack_padded_sequence as pack -from torch.nn.utils.rnn import pad_packed_sequence as unpack - -from onmt.utils.rnn_factory import rnn_factory -from onmt.encoders.encoder import EncoderBase - - -class AudioEncoder(EncoderBase): - """A simple encoder CNN -> RNN for audio input. - - Args: - rnn_type (str): Type of RNN (e.g. GRU, LSTM, etc). - enc_layers (int): Number of encoder layers. - dec_layers (int): Number of decoder layers. - brnn (bool): Bidirectional encoder. - enc_rnn_size (int): Size of hidden states of the rnn. - dec_rnn_size (int): Size of the decoder hidden states. - enc_pooling (str): A comma separated list either of length 1 - or of length ``enc_layers`` specifying the pooling amount. - dropout (float): dropout probablity. - sample_rate (float): input spec - window_size (int): input spec - """ - - def __init__(self, rnn_type, enc_layers, dec_layers, brnn, - enc_rnn_size, dec_rnn_size, enc_pooling, dropout, - sample_rate, window_size): - super(AudioEncoder, self).__init__() - self.enc_layers = enc_layers - self.rnn_type = rnn_type - self.dec_layers = dec_layers - num_directions = 2 if brnn else 1 - self.num_directions = num_directions - assert enc_rnn_size % num_directions == 0 - enc_rnn_size_real = enc_rnn_size // num_directions - assert dec_rnn_size % num_directions == 0 - self.dec_rnn_size = dec_rnn_size - dec_rnn_size_real = dec_rnn_size // num_directions - self.dec_rnn_size_real = dec_rnn_size_real - self.dec_rnn_size = dec_rnn_size - input_size = int(math.floor((sample_rate * window_size) / 2) + 1) - enc_pooling = enc_pooling.split(',') - assert len(enc_pooling) == enc_layers or len(enc_pooling) == 1 - if len(enc_pooling) == 1: - enc_pooling = enc_pooling * enc_layers - enc_pooling = [int(p) for p in enc_pooling] - self.enc_pooling = enc_pooling - - if type(dropout) is not list: - dropout = [dropout] - if max(dropout) > 0: - self.dropout = nn.Dropout(dropout[0]) - else: - self.dropout = None - self.W = nn.Linear(enc_rnn_size, dec_rnn_size, bias=False) - self.batchnorm_0 = nn.BatchNorm1d(enc_rnn_size, affine=True) - self.rnn_0, self.no_pack_padded_seq = \ - rnn_factory(rnn_type, - input_size=input_size, - hidden_size=enc_rnn_size_real, - num_layers=1, - dropout=dropout[0], - bidirectional=brnn) - self.pool_0 = nn.MaxPool1d(enc_pooling[0]) - for l in range(enc_layers - 1): - batchnorm = nn.BatchNorm1d(enc_rnn_size, affine=True) - rnn, _ = \ - rnn_factory(rnn_type, - input_size=enc_rnn_size, - hidden_size=enc_rnn_size_real, - num_layers=1, - dropout=dropout[0], - bidirectional=brnn) - setattr(self, 'rnn_%d' % (l + 1), rnn) - setattr(self, 'pool_%d' % (l + 1), - nn.MaxPool1d(enc_pooling[l + 1])) - setattr(self, 
'batchnorm_%d' % (l + 1), batchnorm)
-
-    @classmethod
-    def from_opt(cls, opt, embeddings=None):
-        """Alternate constructor."""
-        if embeddings is not None:
-            raise ValueError("Cannot use embeddings with AudioEncoder.")
-        return cls(
-            opt.rnn_type,
-            opt.enc_layers,
-            opt.dec_layers,
-            opt.brnn,
-            opt.enc_rnn_size,
-            opt.dec_rnn_size,
-            opt.audio_enc_pooling,
-            opt.dropout,
-            opt.sample_rate,
-            opt.window_size)
-
-    def forward(self, src, lengths=None):
-        """See :func:`onmt.encoders.encoder.EncoderBase.forward()`"""
-        batch_size, _, nfft, t = src.size()
-        src = src.transpose(0, 1).transpose(0, 3).contiguous() \
-                 .view(t, batch_size, nfft)
-        orig_lengths = lengths
-        lengths = lengths.view(-1).tolist()
-
-        for l in range(self.enc_layers):
-            rnn = getattr(self, 'rnn_%d' % l)
-            pool = getattr(self, 'pool_%d' % l)
-            batchnorm = getattr(self, 'batchnorm_%d' % l)
-            stride = self.enc_pooling[l]
-            packed_emb = pack(src, lengths)
-            memory_bank, tmp = rnn(packed_emb)
-            memory_bank = unpack(memory_bank)[0]
-            t, _, _ = memory_bank.size()
-            memory_bank = memory_bank.transpose(0, 2)
-            memory_bank = pool(memory_bank)
-            lengths = [int(math.floor((length - stride) / stride + 1))
-                       for length in lengths]
-            memory_bank = memory_bank.transpose(0, 2)
-            src = memory_bank
-            t, _, num_feat = src.size()
-            src = batchnorm(src.contiguous().view(-1, num_feat))
-            src = src.view(t, -1, num_feat)
-            if self.dropout and l + 1 != self.enc_layers:
-                src = self.dropout(src)
-
-        memory_bank = memory_bank.contiguous().view(-1, memory_bank.size(2))
-        memory_bank = self.W(memory_bank).view(-1, batch_size,
-                                               self.dec_rnn_size)
-
-        state = memory_bank.new_full((self.dec_layers * self.num_directions,
-                                      batch_size, self.dec_rnn_size_real), 0)
-        if self.rnn_type == 'LSTM':
-            # The encoder hidden is (layers*directions) x batch x dim.
-            encoder_final = (state, state)
-        else:
-            encoder_final = state
-        return encoder_final, memory_bank, orig_lengths.new_tensor(lengths)
-
-    def update_dropout(self, dropout):
-        self.dropout.p = dropout
-        for i in range(self.enc_layers - 1):
-            getattr(self, 'rnn_%d' % i).dropout = dropout
diff --git a/onmt/encoders/ggnn_encoder.py b/onmt/encoders/ggnn_encoder.py
index 6a98cce353..7db8e412d9 100644
--- a/onmt/encoders/ggnn_encoder.py
+++ b/onmt/encoders/ggnn_encoder.py
@@ -79,7 +79,9 @@ class GGNNEncoder(EncoderBase):
       bridge_extra_node (bool): True indicates only 1st extra node
          (after token listing) should be used for decoder init.
       n_steps (int): Steps to advance graph encoder for stabilization
-      src_vocab (int): Path to source vocabulary
+      src_vocab (int): Path to source vocabulary. (The GGNN uses src_vocab
+         during training because the graph is built from edge information,
+         which requires parsing the input sequence.)
""" def __init__(self, rnn_type, state_dim, bidir_edges, @@ -208,12 +210,8 @@ def forward(self, src, lengths=None): + source_node] = 1 source_node = -1 - if torch.cuda.is_available(): - prop_state = torch.from_numpy(prop_state).float().to("cuda:0") - edges = torch.from_numpy(edges).float().to("cuda:0") - else: - prop_state = torch.from_numpy(prop_state).float() - edges = torch.from_numpy(edges).float() + prop_state = torch.from_numpy(prop_state).float().to(src.device) + edges = torch.from_numpy(edges).float().to(src.device) for i_step in range(self.n_steps): in_states = [] diff --git a/onmt/encoders/image_encoder.py b/onmt/encoders/image_encoder.py deleted file mode 100644 index 07b1fd1ce6..0000000000 --- a/onmt/encoders/image_encoder.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Image Encoder.""" -import torch.nn as nn -import torch.nn.functional as F -import torch - -from onmt.encoders.encoder import EncoderBase - - -class ImageEncoder(EncoderBase): - """A simple encoder CNN -> RNN for image src. - - Args: - num_layers (int): number of encoder layers. - bidirectional (bool): bidirectional encoder. - rnn_size (int): size of hidden states of the rnn. - dropout (float): dropout probablity. - """ - - def __init__(self, num_layers, bidirectional, rnn_size, dropout, - image_chanel_size=3): - super(ImageEncoder, self).__init__() - self.num_layers = num_layers - self.num_directions = 2 if bidirectional else 1 - self.hidden_size = rnn_size - - self.layer1 = nn.Conv2d(image_chanel_size, 64, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - self.layer2 = nn.Conv2d(64, 128, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - self.layer3 = nn.Conv2d(128, 256, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - self.layer4 = nn.Conv2d(256, 256, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - self.layer5 = nn.Conv2d(256, 512, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - self.layer6 = nn.Conv2d(512, 512, kernel_size=(3, 3), - padding=(1, 1), stride=(1, 1)) - - self.batch_norm1 = nn.BatchNorm2d(256) - self.batch_norm2 = nn.BatchNorm2d(512) - self.batch_norm3 = nn.BatchNorm2d(512) - - src_size = 512 - dropout = dropout[0] if type(dropout) is list else dropout - self.rnn = nn.LSTM(src_size, int(rnn_size / self.num_directions), - num_layers=num_layers, - dropout=dropout, - bidirectional=bidirectional) - self.pos_lut = nn.Embedding(1000, src_size) - - @classmethod - def from_opt(cls, opt, embeddings=None): - """Alternate constructor.""" - if embeddings is not None: - raise ValueError("Cannot use embeddings with ImageEncoder.") - # why is the model_opt.__dict__ check necessary? 
- if "image_channel_size" not in opt.__dict__: - image_channel_size = 3 - else: - image_channel_size = opt.image_channel_size - return cls( - opt.enc_layers, - opt.brnn, - opt.enc_rnn_size, - opt.dropout[0] if type(opt.dropout) is list else opt.dropout, - image_channel_size - ) - - def load_pretrained_vectors(self, opt): - """Pass in needed options only when modify function definition.""" - pass - - def forward(self, src, lengths=None): - """See :func:`onmt.encoders.encoder.EncoderBase.forward()`""" - - batch_size = src.size(0) - # (batch_size, 64, imgH, imgW) - # layer 1 - src = F.relu(self.layer1(src[:, :, :, :] - 0.5), True) - - # (batch_size, 64, imgH/2, imgW/2) - src = F.max_pool2d(src, kernel_size=(2, 2), stride=(2, 2)) - - # (batch_size, 128, imgH/2, imgW/2) - # layer 2 - src = F.relu(self.layer2(src), True) - - # (batch_size, 128, imgH/2/2, imgW/2/2) - src = F.max_pool2d(src, kernel_size=(2, 2), stride=(2, 2)) - - # (batch_size, 256, imgH/2/2, imgW/2/2) - # layer 3 - # batch norm 1 - src = F.relu(self.batch_norm1(self.layer3(src)), True) - - # (batch_size, 256, imgH/2/2, imgW/2/2) - # layer4 - src = F.relu(self.layer4(src), True) - - # (batch_size, 256, imgH/2/2/2, imgW/2/2) - src = F.max_pool2d(src, kernel_size=(1, 2), stride=(1, 2)) - - # (batch_size, 512, imgH/2/2/2, imgW/2/2) - # layer 5 - # batch norm 2 - src = F.relu(self.batch_norm2(self.layer5(src)), True) - - # (batch_size, 512, imgH/2/2/2, imgW/2/2/2) - src = F.max_pool2d(src, kernel_size=(2, 1), stride=(2, 1)) - - # (batch_size, 512, imgH/2/2/2, imgW/2/2/2) - src = F.relu(self.batch_norm3(self.layer6(src)), True) - - # # (batch_size, 512, H, W) - all_outputs = [] - for row in range(src.size(2)): - inp = src[:, :, row, :].transpose(0, 2) \ - .transpose(1, 2) - row_vec = torch.Tensor(batch_size).type_as(inp.data) \ - .long().fill_(row) - pos_emb = self.pos_lut(row_vec) - with_pos = torch.cat( - (pos_emb.view(1, pos_emb.size(0), pos_emb.size(1)), inp), 0) - outputs, hidden_t = self.rnn(with_pos) - all_outputs.append(outputs) - out = torch.cat(all_outputs, 0) - - return hidden_t, out, lengths - - def update_dropout(self, dropout): - self.rnn.dropout = dropout diff --git a/onmt/inputters/__init__.py b/onmt/inputters/__init__.py index 97494d3023..ed9aba8999 100644 --- a/onmt/inputters/__init__.py +++ b/onmt/inputters/__init__.py @@ -3,27 +3,18 @@ Inputters implement the logic of transforming raw data to vectorized inputs, e.g., from a line of text to a sequence of embeddings. 
""" -from onmt.inputters.inputter import \ - load_old_vocab, get_fields, OrderedIterator, \ - build_vocab, old_style_vocab, filter_example +from onmt.inputters.inputter import get_fields, build_vocab, filter_example +from onmt.inputters.iterator import max_tok_len, OrderedIterator from onmt.inputters.dataset_base import Dataset from onmt.inputters.text_dataset import text_sort_key, TextDataReader -from onmt.inputters.image_dataset import img_sort_key, ImageDataReader -from onmt.inputters.audio_dataset import audio_sort_key, AudioDataReader -from onmt.inputters.vec_dataset import vec_sort_key, VecDataReader from onmt.inputters.datareader_base import DataReaderBase str2reader = { - "text": TextDataReader, "img": ImageDataReader, "audio": AudioDataReader, - "vec": VecDataReader} + "text": TextDataReader} str2sortkey = { - 'text': text_sort_key, 'img': img_sort_key, 'audio': audio_sort_key, - 'vec': vec_sort_key} + 'text': text_sort_key} -__all__ = ['Dataset', 'load_old_vocab', 'get_fields', 'DataReaderBase', - 'filter_example', 'old_style_vocab', - 'build_vocab', 'OrderedIterator', - 'text_sort_key', 'img_sort_key', 'audio_sort_key', 'vec_sort_key', - 'TextDataReader', 'ImageDataReader', 'AudioDataReader', - 'VecDataReader'] +__all__ = ['Dataset', 'get_fields', 'DataReaderBase', 'filter_example', + 'build_vocab', 'OrderedIterator', 'max_tok_len', + 'text_sort_key', 'TextDataReader'] diff --git a/onmt/inputters/audio_dataset.py b/onmt/inputters/audio_dataset.py deleted file mode 100644 index 2cd63f064e..0000000000 --- a/onmt/inputters/audio_dataset.py +++ /dev/null @@ -1,223 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from tqdm import tqdm - -import torch -from torchtext.data import Field - -from onmt.inputters.datareader_base import DataReaderBase - -# imports of datatype-specific dependencies -try: - import torchaudio - import librosa - import numpy as np -except ImportError: - torchaudio, librosa, np = None, None, None - - -class AudioDataReader(DataReaderBase): - """Read audio data from disk. - - Args: - sample_rate (int): sample_rate. - window_size (float) : window size for spectrogram in seconds. - window_stride (float): window stride for spectrogram in seconds. - window (str): window type for spectrogram generation. See - :func:`librosa.stft()` ``window`` for more details. - normalize_audio (bool): subtract spectrogram by mean and divide - by std or not. - truncate (int or NoneType): maximum audio length - (0 or None for unlimited). - - Raises: - onmt.inputters.datareader_base.MissingDependencyException: If - importing any of ``torchaudio``, ``librosa``, or ``numpy`` fail. - """ - - def __init__(self, sample_rate=0, window_size=0, window_stride=0, - window=None, normalize_audio=True, truncate=None): - self._check_deps() - self.sample_rate = sample_rate - self.window_size = window_size - self.window_stride = window_stride - self.window = window - self.normalize_audio = normalize_audio - self.truncate = truncate - - @classmethod - def from_opt(cls, opt): - return cls(sample_rate=opt.sample_rate, window_size=opt.window_size, - window_stride=opt.window_stride, window=opt.window) - - @classmethod - def _check_deps(cls): - if any([torchaudio is None, librosa is None, np is None]): - cls._raise_missing_dep( - "torchaudio", "librosa", "numpy") - - def extract_features(self, audio_path): - # torchaudio loading options recently changed. 
It's probably - # straightforward to rewrite the audio handling to make use of - # up-to-date torchaudio, but in the meantime there is a legacy - # method which uses the old defaults - sound, sample_rate_ = torchaudio.legacy.load(audio_path) - if self.truncate and self.truncate > 0: - if sound.size(0) > self.truncate: - sound = sound[:self.truncate] - - assert sample_rate_ == self.sample_rate, \ - 'Sample rate of %s != -sample_rate (%d vs %d)' \ - % (audio_path, sample_rate_, self.sample_rate) - - sound = sound.numpy() - if len(sound.shape) > 1: - if sound.shape[1] == 1: - sound = sound.squeeze() - else: - sound = sound.mean(axis=1) # average multiple channels - - n_fft = int(self.sample_rate * self.window_size) - win_length = n_fft - hop_length = int(self.sample_rate * self.window_stride) - # STFT - d = librosa.stft(sound, n_fft=n_fft, hop_length=hop_length, - win_length=win_length, window=self.window) - spect, _ = librosa.magphase(d) - spect = np.log1p(spect) - spect = torch.FloatTensor(spect) - if self.normalize_audio: - mean = spect.mean() - std = spect.std() - spect.add_(-mean) - spect.div_(std) - return spect - - def read(self, data, side, src_dir=None): - """Read data into dicts. - - Args: - data (str or Iterable[str]): Sequence of audio paths or - path to file containing audio paths. - In either case, the filenames may be relative to ``src_dir`` - (default behavior) or absolute. - side (str): Prefix used in return dict. Usually - ``"src"`` or ``"tgt"``. - src_dir (str): Location of source audio files. See ``data``. - - Yields: - A dictionary containing audio data for each line. - """ - - assert src_dir is not None and os.path.exists(src_dir),\ - "src_dir must be a valid directory if data_type is audio" - - if isinstance(data, str): - data = DataReaderBase._read_file(data) - - for i, line in enumerate(tqdm(data)): - line = line.decode("utf-8").strip() - audio_path = os.path.join(src_dir, line) - if not os.path.exists(audio_path): - audio_path = line - - assert os.path.exists(audio_path), \ - 'audio path %s not found' % line - - spect = self.extract_features(audio_path) - yield {side: spect, side + '_path': line, 'indices': i} - - -def audio_sort_key(ex): - """Sort using duration time of the sound spectrogram.""" - return ex.src.size(1) - - -class AudioSeqField(Field): - """Defines an audio datatype and instructions for converting to Tensor. - - See :class:`Fields` for attribute descriptions. - """ - - def __init__(self, preprocessing=None, postprocessing=None, - include_lengths=False, batch_first=False, pad_index=0, - is_target=False): - super(AudioSeqField, self).__init__( - sequential=True, use_vocab=False, init_token=None, - eos_token=None, fix_length=False, dtype=torch.float, - preprocessing=preprocessing, postprocessing=postprocessing, - lower=False, tokenize=None, include_lengths=include_lengths, - batch_first=batch_first, pad_token=pad_index, unk_token=None, - pad_first=False, truncate_first=False, stop_words=None, - is_target=is_target - ) - - def pad(self, minibatch): - """Pad a batch of examples to the length of the longest example. - - Args: - minibatch (List[torch.FloatTensor]): A list of audio data, - each having shape 1 x n_feats x len where len is variable. - - Returns: - torch.FloatTensor or Tuple[torch.FloatTensor, List[int]]: The - padded tensor of shape ``(batch_size, 1, n_feats, max_len)``. - and a list of the lengths if `self.include_lengths` is `True` - else just returns the padded tensor. 
- """ - - assert not self.pad_first and not self.truncate_first \ - and not self.fix_length and self.sequential - minibatch = list(minibatch) - lengths = [x.size(1) for x in minibatch] - max_len = max(lengths) - nfft = minibatch[0].size(0) - sounds = torch.full((len(minibatch), 1, nfft, max_len), - self.pad_token, dtype=self.dtype) - for i, (spect, len_) in enumerate(zip(minibatch, lengths)): - sounds[i, :, :, 0:len_] = spect - if self.include_lengths: - return (sounds, lengths) - return sounds - - def numericalize(self, arr, device=None): - """Turn a batch of examples that use this field into a Variable. - - If the field has ``include_lengths=True``, a tensor of lengths will be - included in the return value. - - Args: - arr (torch.FloatTensor or Tuple(torch.FloatTensor, List[int])): - List of tokenized and padded examples, or tuple of List of - tokenized and padded examples and List of lengths of each - example if self.include_lengths is True. Examples have shape - ``(batch_size, 1, n_feats, max_len)`` if `self.batch_first` - else ``(max_len, batch_size, 1, n_feats)``. - device (str or torch.device): See `Field.numericalize`. - """ - - assert self.use_vocab is False - if self.include_lengths and not isinstance(arr, tuple): - raise ValueError("Field has include_lengths set to True, but " - "input data is not a tuple of " - "(data batch, batch lengths).") - if isinstance(arr, tuple): - arr, lengths = arr - lengths = torch.tensor(lengths, dtype=torch.int, device=device) - - if self.postprocessing is not None: - arr = self.postprocessing(arr, None) - - if self.sequential and not self.batch_first: - arr = arr.permute(3, 0, 1, 2) - if self.sequential: - arr = arr.contiguous() - arr = arr.to(device) - if self.include_lengths: - return arr, lengths - return arr - - -def audio_fields(**kwargs): - audio = AudioSeqField(pad_index=0, batch_first=True, include_lengths=True) - return audio diff --git a/onmt/inputters/corpus.py b/onmt/inputters/corpus.py new file mode 100644 index 0000000000..73e5b855ed --- /dev/null +++ b/onmt/inputters/corpus.py @@ -0,0 +1,237 @@ +"""Module that contain shard utils for dynamic data.""" +import os +from onmt.utils.logging import logger +from onmt.constants import CorpusName +from onmt.transforms import TransformPipe + +from collections import Counter +from contextlib import contextmanager + + +@contextmanager +def exfile_open(filename, *args, **kwargs): + """Extended file opener enables open(filename=None). + + This context manager enables open(filename=None) as well as regular file. + filename None will produce endlessly None for each iterate, + while filename with valid path will produce lines as usual. + + Args: + filename (str|None): a valid file path or None; + *args: args relate to open file using codecs; + **kwargs: kwargs relate to open file using codecs. + + Yields: + `None` repeatly if filename==None, + else yield from file specified in `filename`. + """ + if filename is None: + from itertools import repeat + _file = repeat(None) + else: + import codecs + _file = codecs.open(filename, *args, **kwargs) + yield _file + if filename is not None and _file: + _file.close() + + +class ParallelCorpus(object): + """A parallel corpus file pair that can be loaded to iterate.""" + + def __init__(self, name, src, tgt, align=None): + """Initialize src & tgt side file path.""" + self.id = name + self.src = src + self.tgt = tgt + self.align = align + + def load(self, offset=0, stride=1): + """ + Load file and iterate by lines. 
+        """
+        with exfile_open(self.src, mode='rb') as fs,\
+                exfile_open(self.tgt, mode='rb') as ft,\
+                exfile_open(self.align, mode='rb') as fa:
+            logger.info(f"Loading {repr(self)}...")
+            for i, (sline, tline, align) in enumerate(zip(fs, ft, fa)):
+                if (i % stride) == offset:
+                    sline = sline.decode('utf-8')
+                    tline = tline.decode('utf-8')
+                    example = {
+                        'src': sline,
+                        'tgt': tline
+                    }
+                    if align is not None:
+                        example['align'] = align.decode('utf-8')
+                    yield example
+
+    def __repr__(self):
+        cls_name = type(self).__name__
+        return '{}({}, {}, align={})'.format(
+            cls_name, self.src, self.tgt, self.align)
+
+
+def get_corpora(opts, is_train=False):
+    corpora_dict = {}
+    if is_train:
+        for corpus_id, corpus_dict in opts.data.items():
+            if corpus_id != CorpusName.VALID:
+                corpora_dict[corpus_id] = ParallelCorpus(
+                    corpus_id,
+                    corpus_dict["path_src"],
+                    corpus_dict["path_tgt"],
+                    corpus_dict["path_align"])
+    else:
+        if CorpusName.VALID in opts.data.keys():
+            corpora_dict[CorpusName.VALID] = ParallelCorpus(
+                CorpusName.VALID,
+                opts.data[CorpusName.VALID]["path_src"],
+                opts.data[CorpusName.VALID]["path_tgt"],
+                opts.data[CorpusName.VALID]["path_align"])
+        else:
+            return None
+    return corpora_dict
+
+
+class ParallelCorpusIterator(object):
+    """An iterator dedicated to ParallelCorpus.
+
+    Args:
+        corpus (ParallelCorpus): corpus to iterate;
+        transform (Transform): transforms to be applied to corpus;
+        infinitely (bool): True to iterate endlessly;
+        skip_empty_level (str): security level when encountering an empty line;
+        stride (int): iterate corpus with this line stride;
+        offset (int): iterate corpus with this line offset.
+    """
+
+    def __init__(self, corpus, transform, infinitely=False,
+                 skip_empty_level='warning', stride=1, offset=0):
+        self.cid = corpus.id
+        self.corpus = corpus
+        self.transform = transform
+        self.infinitely = infinitely
+        if skip_empty_level not in ['silent', 'warning', 'error']:
+            raise ValueError(
+                f"Invalid argument skip_empty_level={skip_empty_level}")
+        self.skip_empty_level = skip_empty_level
+        self.stride = stride
+        self.offset = offset
+
+    def _tokenize(self, stream):
+        for example in stream:
+            src = example['src'].strip('\n').split()
+            tgt = example['tgt'].strip('\n').split()
+            example['src'], example['tgt'] = src, tgt
+            if 'align' in example:
+                example['align'] = example['align'].strip('\n').split()
+            yield example
+
+    def _transform(self, stream):
+        for example in stream:
+            # NOTE: moved to DatasetAdapter._process method in iterator.py
+            # item = self.transform.apply(
+            #     example, is_train=self.infinitely, corpus_name=self.cid)
+            item = (example, self.transform, self.cid)
+            if item is not None:
+                yield item
+        report_msg = self.transform.stats()
+        if report_msg != '':
+            logger.info("Transform statistics for {}:\n{}".format(
+                self.cid, report_msg))
+
+    def _add_index(self, stream):
+        for i, item in enumerate(stream):
+            example = item[0]
+            line_number = i * self.stride + self.offset
+            example['indices'] = line_number
+            if (len(example['src']) == 0 or len(example['tgt']) == 0 or
+                    ('align' in example and example['align'] == 0)):
+                # empty example: skip
+                empty_msg = f"Empty line exists in {self.cid}#{line_number}."
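+                # Editor's note: with 'error' the empty line aborts
+                # loading, with 'warning' it is logged and skipped,
+                # and with 'silent' it is skipped without any message.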
+                if self.skip_empty_level == 'error':
+                    raise IOError(empty_msg)
+                elif self.skip_empty_level == 'warning':
+                    logger.warning(empty_msg)
+                continue
+            yield item
+
+    def _iter_corpus(self):
+        corpus_stream = self.corpus.load(
+            stride=self.stride, offset=self.offset)
+        tokenized_corpus = self._tokenize(corpus_stream)
+        transformed_corpus = self._transform(tokenized_corpus)
+        indexed_corpus = self._add_index(transformed_corpus)
+        yield from indexed_corpus
+
+    def __iter__(self):
+        if self.infinitely:
+            while True:
+                _iter = self._iter_corpus()
+                yield from _iter
+        else:
+            yield from self._iter_corpus()
+
+
+def build_corpora_iters(corpora, transforms, corpora_info, is_train=False,
+                        skip_empty_level='warning', stride=1, offset=0):
+    """Return `ParallelCorpusIterator` for all corpora defined in opts."""
+    corpora_iters = dict()
+    for c_id, corpus in corpora.items():
+        c_transform_names = corpora_info[c_id].get('transforms', [])
+        corpus_transform = [transforms[name] for name in c_transform_names]
+        transform_pipe = TransformPipe.build_from(corpus_transform)
+        logger.info(f"{c_id}'s transforms: {str(transform_pipe)}")
+        corpus_iter = ParallelCorpusIterator(
+            corpus, transform_pipe, infinitely=is_train,
+            skip_empty_level=skip_empty_level, stride=stride, offset=offset)
+        corpora_iters[c_id] = corpus_iter
+    return corpora_iters
+
+
+def save_transformed_sample(opts, transforms, n_sample=3, build_vocab=False):
+    """Save transformed data sample as specified in opts."""
+
+    if n_sample == -1:
+        logger.info(f"n_sample={n_sample}: Save full transformed corpus.")
+    elif n_sample == 0:
+        logger.info(f"n_sample={n_sample}: no sample will be saved.")
+        return
+    elif n_sample > 0:
+        logger.info(f"Save {n_sample} transformed examples per corpus.")
+    else:
+        raise ValueError(f"n_sample should be >= -1, got {n_sample}.")
+
+    from onmt.inputters.dynamic_iterator import DatasetAdapter
+    corpora = get_corpora(opts, is_train=True)
+    if build_vocab:
+        counter_src = Counter()
+        counter_tgt = Counter()
+    datasets_iterables = build_corpora_iters(
+        corpora, transforms, opts.data, is_train=False,
+        skip_empty_level=opts.skip_empty_level)
+    sample_path = os.path.join(
+        os.path.dirname(opts.save_data), CorpusName.SAMPLE)
+    os.makedirs(sample_path, exist_ok=True)
+    for c_name, c_iter in datasets_iterables.items():
+        dest_base = os.path.join(
+            sample_path, "{}.{}".format(c_name, CorpusName.SAMPLE))
+        with open(dest_base + ".src", 'w', encoding="utf-8") as f_src,\
+                open(dest_base + ".tgt", 'w', encoding="utf-8") as f_tgt:
+            for i, item in enumerate(c_iter):
+                maybe_example = DatasetAdapter._process(item, is_train=True)
+                if maybe_example is None:
+                    continue
+                src_line, tgt_line = maybe_example['src'], maybe_example['tgt']
+                if build_vocab:
+                    counter_src.update(src_line.split(' '))
+                    counter_tgt.update(tgt_line.split(' '))
+                f_src.write(src_line + '\n')
+                f_tgt.write(tgt_line + '\n')
+                if n_sample > 0 and i >= n_sample:
+                    break
+    if build_vocab:
+        return counter_src, counter_tgt
diff --git a/onmt/inputters/datareader_base.py b/onmt/inputters/datareader_base.py
index a8516385ae..8a7e09adb5 100644
--- a/onmt/inputters/datareader_base.py
+++ b/onmt/inputters/datareader_base.py
@@ -40,6 +40,6 @@ def _raise_missing_dep(*missing_deps):
            "Could not create reader. 
Be sure to install " "the following dependencies: " + ", ".join(missing_deps)) - def read(self, data, side, src_dir): + def read(self, data, side): """Read data from file system and yield as dicts.""" raise NotImplementedError() diff --git a/onmt/inputters/dataset_base.py b/onmt/inputters/dataset_base.py index 2a61722c62..5aa909e2d1 100644 --- a/onmt/inputters/dataset_base.py +++ b/onmt/inputters/dataset_base.py @@ -38,7 +38,7 @@ def _dynamic_dict(example, src_field, tgt_field): tgt_field (torchtext.data.Field): Field object. Returns: - torchtext.data.Vocab and ``example``, changed as described. + ``example``, changed as described. """ src = src_field.tokenize(example["src"]) @@ -57,7 +57,7 @@ def _dynamic_dict(example, src_field, tgt_field): mask = torch.LongTensor( [unk_idx] + [src_ex_vocab.stoi[w] for w in tgt] + [unk_idx]) example["alignment"] = mask - return src_ex_vocab, example + return example class Dataset(TorchtextDataset): @@ -92,8 +92,6 @@ class Dataset(TorchtextDataset): where ``data_arg`` is passed to the ``read()`` method of the reader in ``readers`` at that position. (See the reader object for details on the ``Any`` type.) - dirs (Iterable[str or NoneType]): A list of directories where - data is contained. See the reader object for more details. sort_key (Callable[[torchtext.data.Example], Any]): A function for determining the value on which data is sorted (i.e. length). filter_pred (Callable[[torchtext.data.Example], bool]): A function @@ -107,29 +105,23 @@ class Dataset(TorchtextDataset): predict to copy them. """ - def __init__(self, fields, readers, data, dirs, sort_key, - filter_pred=None, corpus_id=None): + def __init__(self, fields, readers, data, sort_key, filter_pred=None): self.sort_key = sort_key can_copy = 'src_map' in fields and 'alignment' in fields - read_iters = [r.read(dat[1], dat[0], dir_) for r, dat, dir_ - in zip(readers, data, dirs)] + read_iters = [r.read(dat[1], dat[0]) for r, dat in zip(readers, data)] # self.src_vocabs is used in collapse_copy_scores and Translator.py self.src_vocabs = [] examples = [] for ex_dict in starmap(_join_dicts, zip(*read_iters)): - if corpus_id is not None: - ex_dict["corpus_id"] = corpus_id - else: - ex_dict["corpus_id"] = "train" if can_copy: src_field = fields['src'] tgt_field = fields['tgt'] # this assumes src_field and tgt_field are both text - src_ex_vocab, ex_dict = _dynamic_dict( + ex_dict = _dynamic_dict( ex_dict, src_field.base_field, tgt_field.base_field) - self.src_vocabs.append(src_ex_vocab) + self.src_vocabs.append(ex_dict["src_ex_vocab"]) ex_fields = {k: [(k, v)] for k, v in fields.items() if k in ex_dict} ex = Example.fromdict(ex_dict, ex_fields) @@ -159,10 +151,9 @@ def save(self, path, remove_fields=True): @staticmethod def config(fields): - readers, data, dirs = [], [], [] + readers, data = [], [] for name, field in fields: if field["data"] is not None: readers.append(field["reader"]) data.append((name, field["data"])) - dirs.append(field["dir"]) - return readers, data, dirs + return readers, data diff --git a/onmt/inputters/dynamic_iterator.py b/onmt/inputters/dynamic_iterator.py new file mode 100644 index 0000000000..72887ca300 --- /dev/null +++ b/onmt/inputters/dynamic_iterator.py @@ -0,0 +1,266 @@ +"""Module that contain iterator used for dynamic data.""" +from itertools import cycle + +from torchtext.data import Dataset as TorchtextDataset, \ + Example as TorchtextExample, batch as torchtext_batch +from onmt.inputters import str2sortkey, max_tok_len, OrderedIterator +from onmt.inputters.dataset_base 
import _dynamic_dict
+from onmt.inputters.corpus import get_corpora, build_corpora_iters
+from onmt.transforms import make_transforms
+
+
+class DatasetAdapter(object):
+    """Adapt buckets of tuples into examples of a torchtext Dataset."""
+
+    valid_field_name = (
+        'src', 'tgt', 'indices', 'src_map', 'src_ex_vocab', 'alignment',
+        'align')
+
+    def __init__(self, fields, is_train):
+        self.fields_dict = self._valid_fields(fields)
+        self.is_train = is_train
+
+    @classmethod
+    def _valid_fields(cls, fields):
+        """Return valid fields in dict format."""
+        return {
+            f_k: f_v for f_k, f_v in fields.items()
+            if f_k in cls.valid_field_name
+        }
+
+    @staticmethod
+    def _process(item, is_train):
+        """Return valid transformed example from `item`."""
+        example, transform, cid = item
+        # this is a hack: appears quicker to apply it here
+        # than in the ParallelCorpusIterator
+        maybe_example = transform.apply(
+            example, is_train=is_train, corpus_name=cid)
+        if maybe_example is None:
+            return None
+        maybe_example['src'] = ' '.join(maybe_example['src'])
+        maybe_example['tgt'] = ' '.join(maybe_example['tgt'])
+        if 'align' in maybe_example:
+            maybe_example['align'] = ' '.join(maybe_example['align'])
+        return maybe_example
+
+    def _maybe_add_dynamic_dict(self, example, fields):
+        """maybe update `example` with dynamic_dict related fields."""
+        if 'src_map' in fields and 'alignment' in fields:
+            example = _dynamic_dict(
+                example,
+                fields['src'].base_field,
+                fields['tgt'].base_field)
+        return example
+
+    def _to_examples(self, bucket, is_train=False):
+        examples = []
+        for item in bucket:
+            maybe_example = self._process(item, is_train=is_train)
+            if maybe_example is not None:
+                example = self._maybe_add_dynamic_dict(
+                    maybe_example, self.fields_dict)
+                ex_fields = {k: [(k, v)] for k, v in self.fields_dict.items()
+                             if k in example}
+                ex = TorchtextExample.fromdict(example, ex_fields)
+                examples.append(ex)
+        return examples
+
+    def __call__(self, bucket):
+        examples = self._to_examples(bucket, is_train=self.is_train)
+        dataset = TorchtextDataset(examples, self.fields_dict)
+        return dataset
+
+
+class MixingStrategy(object):
+    """Mixing strategy that should be used in Data Iterator."""
+
+    def __init__(self, iterables, weights):
+        """Initialize necessary attributes."""
+        self._valid_iterable(iterables, weights)
+        self.iterables = iterables
+        self.weights = weights
+
+    def _valid_iterable(self, iterables, weights):
+        iter_keys = iterables.keys()
+        weight_keys = weights.keys()
+        if iter_keys != weight_keys:
+            raise ValueError(
+                f"keys in {iterables} & {weights} should be equal.")
+
+    def __iter__(self):
+        raise NotImplementedError
+
+
+class SequentialMixer(MixingStrategy):
+    """Generate data sequentially from `iterables`, which are exhaustible."""
+
+    def _iter_datasets(self):
+        for ds_name, ds_weight in self.weights.items():
+            for _ in range(ds_weight):
+                yield ds_name
+
+    def __iter__(self):
+        for ds_name in self._iter_datasets():
+            iterable = self.iterables[ds_name]
+            yield from iterable
+
+
+class WeightedMixer(MixingStrategy):
+    """A mixing strategy that mixes data by weight and iterates infinitely."""
+
+    def __init__(self, iterables, weights):
+        super().__init__(iterables, weights)
+        self._iterators = {
+            ds_name: iter(generator)
+            for ds_name, generator in self.iterables.items()
+        }
+
+    def _reset_iter(self, ds_name):
+        self._iterators[ds_name] = iter(self.iterables[ds_name])
+
+    def _iter_datasets(self):
+        for ds_name, ds_weight in self.weights.items():
+            for _ in range(ds_weight):
+                yield ds_name
+
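+    # Editor's note (illustration): with weights {'A': 2, 'B': 1}, one
+    # pass over _iter_datasets yields A, A, B; cycling this forever and
+    # restarting any exhausted iterator gives an endless weighted mix.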
+    def __iter__(self):
+        for ds_name in cycle(self._iter_datasets()):
+            iterator = self._iterators[ds_name]
+            try:
+                item = next(iterator)
+            except StopIteration:
+                self._reset_iter(ds_name)
+                iterator = self._iterators[ds_name]
+                item = next(iterator)
+            finally:
+                yield item
+
+
+class DynamicDatasetIter(object):
+    """Yield batches from (multiple) plain text corpora.
+
+    Args:
+        corpora (dict[str, ParallelCorpus]): collections of corpora to iterate;
+        corpora_info (dict[str, dict]): corpora info corresponding to each corpus;
+        transforms (dict[str, Transform]): transforms that may be used by the corpora;
+        fields (dict[str, Field]): fields dict for converting corpora into Tensors;
+        is_train (bool): True when generating data for training;
+        batch_type (str): batching type to count on, choices=[tokens, sents];
+        batch_size (int): number of examples in a batch;
+        batch_size_multiple (int): make batch size a multiple of this;
+        data_type (str): input data type, currently only text;
+        bucket_size (int): accumulate this number of examples in a dynamic dataset;
+        pool_factor (int): accumulate this number of batches before sorting;
+        skip_empty_level (str): security level when encountering an empty line;
+        stride (int): iterate data files with this stride;
+        offset (int): iterate data files with this offset.
+
+    Attributes:
+        batch_size_fn (function): function to calculate batch size;
+        sort_key (function): function defining how to sort examples;
+        dataset_adapter (DatasetAdapter): adapter that organizes a raw corpus
+            into tensor datasets;
+        mixer (MixingStrategy): the strategy to iterate corpora.
+    """
+
+    def __init__(self, corpora, corpora_info, transforms, fields, is_train,
+                 batch_type, batch_size, batch_size_multiple, data_type="text",
+                 bucket_size=2048, pool_factor=8192,
+                 skip_empty_level='warning', stride=1, offset=0):
+        self.corpora = corpora
+        self.transforms = transforms
+        self.fields = fields
+        self.corpora_info = corpora_info
+        self.is_train = is_train
+        self.init_iterators = False
+        self.batch_size = batch_size
+        self.batch_size_fn = max_tok_len \
+            if is_train and batch_type == "tokens" else None
+        self.batch_size_multiple = batch_size_multiple
+        self.device = 'cpu'
+        self.sort_key = str2sortkey[data_type]
+        self.bucket_size = bucket_size
+        self.pool_factor = pool_factor
+        if stride <= 0:
+            raise ValueError(f"Invalid argument for stride={stride}.")
+        self.stride = stride
+        self.offset = offset
+        if skip_empty_level not in ['silent', 'warning', 'error']:
+            raise ValueError(
+                f"Invalid argument skip_empty_level={skip_empty_level}")
+        self.skip_empty_level = skip_empty_level
+
+    @classmethod
+    def from_opts(cls, corpora, transforms, fields, opts, is_train,
+                  stride=1, offset=0):
+        """Initialize `DynamicDatasetIter` with options parsed from `opts`."""
+        batch_size = opts.batch_size if is_train else opts.valid_batch_size
+        if opts.batch_size_multiple is not None:
+            batch_size_multiple = opts.batch_size_multiple
+        else:
+            batch_size_multiple = 8 if opts.model_dtype == "fp16" else 1
+        return cls(
+            corpora, opts.data, transforms, fields, is_train, opts.batch_type,
+            batch_size, batch_size_multiple, data_type=opts.data_type,
+            bucket_size=opts.bucket_size, pool_factor=opts.pool_factor,
+            skip_empty_level=opts.skip_empty_level,
+            stride=stride, offset=offset
+        )
+
+    def _init_datasets(self):
+        datasets_iterables = build_corpora_iters(
+            self.corpora, self.transforms,
+            self.corpora_info, self.is_train,
+            skip_empty_level=self.skip_empty_level,
+            stride=self.stride, offset=self.offset)
+        self.dataset_adapter = DatasetAdapter(self.fields, self.is_train)
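+        # Editor's note: each corpus declares an integer 'weight' in its
+        # data config; the mixing strategies below consume these counts.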
+        datasets_weights = {
+            ds_name: int(self.corpora_info[ds_name]['weight'])
+            for ds_name in datasets_iterables.keys()
+        }
+        if self.is_train:
+            self.mixer = WeightedMixer(datasets_iterables, datasets_weights)
+        else:
+            self.mixer = SequentialMixer(datasets_iterables, datasets_weights)
+        self.init_iterators = True
+
+    def _bucketing(self):
+        buckets = torchtext_batch(
+            self.mixer,
+            batch_size=self.bucket_size,
+            batch_size_fn=None)
+        yield from buckets
+
+    def __iter__(self):
+        if self.init_iterators is False:
+            self._init_datasets()
+        for bucket in self._bucketing():
+            dataset = self.dataset_adapter(bucket)
+            train_iter = OrderedIterator(
+                dataset,
+                self.batch_size,
+                pool_factor=self.pool_factor,
+                batch_size_fn=self.batch_size_fn,
+                batch_size_multiple=self.batch_size_multiple,
+                device=self.device,
+                train=self.is_train,
+                sort=False,
+                sort_within_batch=True,
+                sort_key=self.sort_key,
+                repeat=False,
+            )
+            for batch in train_iter:
+                yield batch
+
+
+def build_dynamic_dataset_iter(fields, transforms_cls, opts, is_train=True,
+                               stride=1, offset=0):
+    """Build `DynamicDatasetIter` from fields & opts."""
+    transforms = make_transforms(opts, transforms_cls, fields)
+    corpora = get_corpora(opts, is_train)
+    if corpora is None:
+        assert not is_train, "only valid corpus is ignorable."
+        return None
+    return DynamicDatasetIter.from_opts(
+        corpora, transforms, fields, opts, is_train,
+        stride=stride, offset=offset)
diff --git a/onmt/inputters/fields.py b/onmt/inputters/fields.py
new file mode 100644
index 0000000000..89fe32eb1e
--- /dev/null
+++ b/onmt/inputters/fields.py
@@ -0,0 +1,83 @@
+"""Module for building dynamic fields."""
+from collections import Counter, defaultdict
+import os
+import torch
+from onmt.utils.logging import logger
+from onmt.utils.misc import check_path
+from onmt.inputters.inputter import get_fields, _load_vocab, \
+    _build_fields_vocab
+
+
+def _get_dynamic_fields(opts):
+    # NOTE: nfeats > 0 is not supported yet
+    src_nfeats = 0
+    tgt_nfeats = 0
+    with_align = hasattr(opts, 'lambda_align') and opts.lambda_align > 0.0
+    fields = get_fields('text', src_nfeats, tgt_nfeats,
+                        dynamic_dict=opts.copy_attn,
+                        src_truncate=opts.src_seq_length_trunc,
+                        tgt_truncate=opts.tgt_seq_length_trunc,
+                        with_align=with_align)
+
+    return fields
+
+
+def build_dynamic_fields(opts, src_specials=None, tgt_specials=None):
+    """Build fields for dynamic data, including loading & building vocab."""
+    fields = _get_dynamic_fields(opts)
+
+    counters = defaultdict(Counter)
+    logger.info("Loading vocab from text file...")
+
+    _src_vocab, _src_vocab_size = _load_vocab(
+        opts.src_vocab, 'src', counters,
+        min_freq=opts.src_words_min_frequency)
+
+    if opts.tgt_vocab:
+        _tgt_vocab, _tgt_vocab_size = _load_vocab(
+            opts.tgt_vocab, 'tgt', counters,
+            min_freq=opts.tgt_words_min_frequency)
+    elif opts.share_vocab:
+        logger.info("Sharing src vocab to tgt...")
+        counters['tgt'] = counters['src']
+    else:
+        raise ValueError("-tgt_vocab should be specified if not share_vocab.")
+
+    logger.info("Building fields with vocab in counters...")
+    fields = _build_fields_vocab(
+        fields, counters, 'text', opts.share_vocab,
+        opts.vocab_size_multiple,
+        opts.src_vocab_size, opts.src_words_min_frequency,
+        opts.tgt_vocab_size, opts.tgt_words_min_frequency,
+        src_specials=src_specials, tgt_specials=tgt_specials)
+
+    return fields
+
+
+def get_vocabs(fields):
+    """Get a dict containing src & tgt vocab lists extracted from fields."""
+    src_vocab = fields['src'].base_field.vocab.itos
+    tgt_vocab = fields['tgt'].base_field.vocab.itos
+    vocabs = 
{'src': src_vocab, 'tgt': tgt_vocab} + return vocabs + + +def save_fields(fields, save_data, overwrite=True): + """Dump `fields` object.""" + fields_path = "{}.vocab.pt".format(save_data) + os.makedirs(os.path.dirname(fields_path), exist_ok=True) + check_path(fields_path, exist_ok=overwrite, log=logger.warning) + logger.info(f"Saving fields to {fields_path}...") + torch.save(fields, fields_path) + + +def load_fields(save_data, checkpoint=None): + """Load dumped fields object from `save_data` or `checkpoint` if any.""" + if checkpoint is not None: + logger.info(f"Loading fields from checkpoint...") + fields = checkpoint['vocab'] + else: + fields_path = "{}.vocab.pt".format(save_data) + logger.info(f"Loading fields from {fields_path}...") + fields = torch.load(fields_path) + return fields diff --git a/onmt/inputters/image_dataset.py b/onmt/inputters/image_dataset.py deleted file mode 100644 index 1a21ae64e8..0000000000 --- a/onmt/inputters/image_dataset.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 -*- - -import os - -import torch -from torchtext.data import Field - -from onmt.inputters.datareader_base import DataReaderBase - -# domain specific dependencies -try: - from PIL import Image - from torchvision import transforms - import cv2 -except ImportError: - Image, transforms, cv2 = None, None, None - - -class ImageDataReader(DataReaderBase): - """Read image data from disk. - - Args: - truncate (tuple[int] or NoneType): maximum img size. Use - ``(0,0)`` or ``None`` for unlimited. - channel_size (int): Number of channels per image. - - Raises: - onmt.inputters.datareader_base.MissingDependencyException: If - importing any of ``PIL``, ``torchvision``, or ``cv2`` fail. - """ - - def __init__(self, truncate=None, channel_size=3): - self._check_deps() - self.truncate = truncate - self.channel_size = channel_size - - @classmethod - def from_opt(cls, opt): - return cls(channel_size=opt.image_channel_size) - - @classmethod - def _check_deps(cls): - if any([Image is None, transforms is None, cv2 is None]): - cls._raise_missing_dep( - "PIL", "torchvision", "cv2") - - def read(self, images, side, img_dir=None): - """Read data into dicts. - - Args: - images (str or Iterable[str]): Sequence of image paths or - path to file containing audio paths. - In either case, the filenames may be relative to ``src_dir`` - (default behavior) or absolute. - side (str): Prefix used in return dict. Usually - ``"src"`` or ``"tgt"``. - img_dir (str): Location of source image files. See ``images``. - - Yields: - a dictionary containing image data, path and index for each line. 
- """ - if isinstance(images, str): - images = DataReaderBase._read_file(images) - - for i, filename in enumerate(images): - filename = filename.decode("utf-8").strip() - img_path = os.path.join(img_dir, filename) - if not os.path.exists(img_path): - img_path = filename - - assert os.path.exists(img_path), \ - 'img path %s not found' % filename - - if self.channel_size == 1: - img = transforms.ToTensor()( - Image.fromarray(cv2.imread(img_path, 0))) - else: - img = Image.open(img_path).convert('RGB') - img = transforms.ToTensor()(img) - if self.truncate and self.truncate != (0, 0): - if not (img.size(1) <= self.truncate[0] - and img.size(2) <= self.truncate[1]): - continue - yield {side: img, side + '_path': filename, 'indices': i} - - -def img_sort_key(ex): - """Sort using the size of the image: (width, height).""" - return ex.src.size(2), ex.src.size(1) - - -def batch_img(data, vocab): - """Pad and batch a sequence of images.""" - c = data[0].size(0) - h = max([t.size(1) for t in data]) - w = max([t.size(2) for t in data]) - imgs = torch.zeros(len(data), c, h, w).fill_(1) - for i, img in enumerate(data): - imgs[i, :, 0:img.size(1), 0:img.size(2)] = img - return imgs - - -def image_fields(**kwargs): - img = Field( - use_vocab=False, dtype=torch.float, - postprocessing=batch_img, sequential=False) - return img diff --git a/onmt/inputters/inputter.py b/onmt/inputters/inputter.py index 2b193bc626..377d400ef6 100644 --- a/onmt/inputters/inputter.py +++ b/onmt/inputters/inputter.py @@ -1,27 +1,19 @@ # -*- coding: utf-8 -*- -import glob import os import codecs import math -from collections import Counter, defaultdict -from itertools import chain, cycle +from collections import Counter, defaultdict, OrderedDict import torch -import torchtext.data from torchtext.data import Field, RawField, LabelField from torchtext.vocab import Vocab -from torchtext.data.utils import RandomShuffler -from onmt.inputters.text_dataset import text_fields, TextMultiField -from onmt.inputters.image_dataset import image_fields -from onmt.inputters.audio_dataset import audio_fields -from onmt.inputters.vec_dataset import vec_fields +from onmt.constants import DefaultTokens +from onmt.inputters.text_dataset import text_fields from onmt.utils.logging import logger # backwards compatibility from onmt.inputters.text_dataset import _feature_tokenize # noqa: F401 -from onmt.inputters.image_dataset import ( # noqa: F401 - batch_img as make_img) import gc @@ -103,9 +95,9 @@ def get_fields( src_data_type, n_src_feats, n_tgt_feats, - pad='', - bos='', - eos='', + pad=DefaultTokens.PAD, + bos=DefaultTokens.BOS, + eos=DefaultTokens.EOS, dynamic_dict=False, with_align=False, src_truncate=None, @@ -113,7 +105,7 @@ def get_fields( ): """ Args: - src_data_type: type of the source input. Options are [text|img|audio]. + src_data_type: type of the source input. Options are [text]. n_src_feats (int): the number of source features (not counting tokens) to create a :class:`torchtext.data.Field` for. (If ``src_data_type=="text"``, these fields are stored together @@ -137,16 +129,13 @@ def get_fields( the dataset example attributes. 
""" - assert src_data_type in ['text', 'img', 'audio', 'vec'], \ + assert src_data_type in ['text'], \ "Data type not implemented" assert not dynamic_dict or src_data_type == 'text', \ 'it is not possible to use dynamic_dict with non-text input' fields = {} - fields_getters = {"text": text_fields, - "img": image_fields, - "audio": audio_fields, - "vec": vec_fields} + fields_getters = {"text": text_fields} src_field_kwargs = {"n_feats": n_src_feats, "include_lengths": True, @@ -165,9 +154,6 @@ def get_fields( indices = Field(use_vocab=False, dtype=torch.long, sequential=False) fields["indices"] = indices - corpus_ids = Field(use_vocab=True, sequential=False) - fields["corpus_id"] = corpus_ids - if dynamic_dict: src_map = Field( use_vocab=False, dtype=torch.float, @@ -189,13 +175,6 @@ def get_fields( return fields -def patch_fields(opt, fields): - dvocab = torch.load(opt.data + '.vocab.pt') - maybe_cid_field = dvocab.get('corpus_id', None) - if maybe_cid_field is not None: - fields.update({'corpus_id': maybe_cid_field}) - - class IterOnDevice(object): """Sent items from `iterable` on `device_id` and yield.""" @@ -222,8 +201,6 @@ def batch_to_device(batch, device_id): if hasattr(batch, 'src_map') else None batch.align = batch.align.to(device) \ if hasattr(batch, 'align') else None - batch.corpus_id = batch.corpus_id.to(device) \ - if hasattr(batch, 'corpus_id') else None def __iter__(self): for batch in self.iterable: @@ -231,108 +208,6 @@ def __iter__(self): yield batch -def load_old_vocab(vocab, data_type="text", dynamic_dict=False): - """Update a legacy vocab/field format. - - Args: - vocab: a list of (field name, torchtext.vocab.Vocab) pairs. This is the - format formerly saved in *.vocab.pt files. Or, text data - not using a :class:`TextMultiField`. - data_type (str): text, img, or audio - dynamic_dict (bool): Used for copy attention. - - Returns: - a dictionary whose keys are the field names and whose values Fields. - """ - - if _old_style_vocab(vocab): - # List[Tuple[str, Vocab]] -> List[Tuple[str, Field]] - # -> dict[str, Field] - vocab = dict(vocab) - n_src_features = sum('src_feat_' in k for k in vocab) - n_tgt_features = sum('tgt_feat_' in k for k in vocab) - fields = get_fields( - data_type, n_src_features, n_tgt_features, - dynamic_dict=dynamic_dict) - for n, f in fields.items(): - try: - f_iter = iter(f) - except TypeError: - f_iter = [(n, f)] - for sub_n, sub_f in f_iter: - if sub_n in vocab: - sub_f.vocab = vocab[sub_n] - return fields - - if _old_style_field_list(vocab): # upgrade to multifield - # Dict[str, List[Tuple[str, Field]]] - # doesn't change structure - don't return early. - fields = vocab - for base_name, vals in fields.items(): - if ((base_name == 'src' and data_type == 'text') or - base_name == 'tgt'): - assert not isinstance(vals[0][1], TextMultiField) - fields[base_name] = [(base_name, TextMultiField( - vals[0][0], vals[0][1], vals[1:]))] - - if _old_style_nesting(vocab): - # Dict[str, List[Tuple[str, Field]]] -> List[Tuple[str, Field]] - # -> dict[str, Field] - fields = dict(list(chain.from_iterable(vocab.values()))) - - return fields - - -def _old_style_vocab(vocab): - """Detect old-style vocabs (``List[Tuple[str, torchtext.data.Vocab]]``). - - Args: - vocab: some object loaded from a *.vocab.pt file - - Returns: - Whether ``vocab`` is a list of pairs where the second object - is a :class:`torchtext.vocab.Vocab` object. 
-
-    This exists because previously only the vocab objects from the fields
-    were saved directly, not the fields themselves, and the fields needed to
-    be reconstructed at training and translation time.
-    """
-
-    return isinstance(vocab, list) and \
-        any(isinstance(v[1], Vocab) for v in vocab)
-
-
-def _old_style_nesting(vocab):
-    """Detect old-style nesting (``dict[str, List[Tuple[str, Field]]]``)."""
-    return isinstance(vocab, dict) and \
-        any(isinstance(v, list) for v in vocab.values())
-
-
-def _old_style_field_list(vocab):
-    """Detect old-style text fields.
-
-    Not old style vocab, old nesting, and text-type fields not using
-    ``TextMultiField``.
-
-    Args:
-        vocab: some object loaded from a *.vocab.pt file
-
-    Returns:
-        Whether ``vocab`` is not an :func:`_old_style_vocab` and not
-        a :class:`TextMultiField` (using an old-style text representation).
-    """
-
-    # if tgt isn't using TextMultiField, then no text field is.
-    return (not _old_style_vocab(vocab)) and _old_style_nesting(vocab) and \
-        (not isinstance(vocab['tgt'][0][1], TextMultiField))
-
-
-def old_style_vocab(vocab):
-    """The vocab/fields need updated."""
-    return _old_style_vocab(vocab) or _old_style_field_list(vocab) or \
-        _old_style_nesting(vocab)
-
-
def filter_example(ex, use_src_len=True, use_tgt_len=True,
                   min_src_len=1, max_src_len=float('inf'),
                   min_tgt_len=1, max_tgt_len=float('inf')):
@@ -366,43 +241,56 @@ def _pad_vocab_to_multiple(vocab, multiple):
    if vocab_size % multiple == 0:
        return
    target_size = int(math.ceil(vocab_size / multiple)) * multiple
-    padding_tokens = [
-        "averyunlikelytoken%d" % i for i in range(target_size - vocab_size)]
+    padding_tokens = ["{}{}".format(DefaultTokens.VOCAB_PAD, i)
+                      for i in range(target_size - vocab_size)]
    vocab.extend(Vocab(Counter(), specials=padding_tokens))
    return vocab


def _build_field_vocab(field, counter, size_multiple=1, **kwargs):
    # this is basically copy-pasted from torchtext.
-    all_specials = [
+    all_special = [
        field.unk_token, field.pad_token, field.init_token, field.eos_token
    ]
-    specials = [tok for tok in all_specials if tok is not None]
+    all_special.extend(list(kwargs.pop('specials', [])))
+    specials = list(OrderedDict.fromkeys(
+        tok for tok in all_special if tok is not None))
    field.vocab = field.vocab_cls(counter, specials=specials, **kwargs)
    if size_multiple > 1:
        _pad_vocab_to_multiple(field.vocab, size_multiple)


-def _load_vocab(vocab_path, name, counters, min_freq):
+def _load_vocab(vocab_path, name, counters, min_freq=0):
+    """In-place update of `counters[name]` with the vocab in `vocab_path`.
+
+    Each line of `vocab_path` has a token, possibly followed by a count.
+    If no count is given, each token is assigned one so that the order of
+    counters[name] matches `vocab_path`, with the minimum count being
+    `min_freq` (which defaults to 0).
+    """
    # counters changes in place
-    vocab = _read_vocab_file(vocab_path, name)
+    vocab, has_count = _read_vocab_file(vocab_path, name)
    vocab_size = len(vocab)
    logger.info('Loaded %s vocab has %d tokens.'
                % (name, vocab_size))
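+    # Editor's note (illustration): a vocab file holds either bare
+    # tokens, one per line (e.g. "the"), or token-count pairs
+    # (e.g. "the 1234"); `has_count` distinguishes the two layouts.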
% (name, vocab_size)) - for i, token in enumerate(vocab): - # keep the order of tokens specified in the vocab file by - # adding them to the counter with decreasing counting values - counters[name][token] = vocab_size - i + min_freq + if not has_count: + for i, token in enumerate(vocab): + # keep the order of tokens specified in the vocab file by + # adding them to the counter with decreasing counting values + counters[name][token] = vocab_size - i + min_freq + else: + for token, count in vocab: + counters[name][token] = int(count) return vocab, vocab_size -def _build_fv_from_multifield(multifield, counters, build_fv_args, +def _build_fv_from_multifield(multifield, counters, build_fv_kwargs, size_multiple=1): for name, field in multifield: _build_field_vocab( field, counters[name], size_multiple=size_multiple, - **build_fv_args[name]) + **build_fv_kwargs[name]) logger.info(" * %s vocab size: %d." % (name, len(field.vocab))) @@ -410,30 +298,29 @@ def _build_fields_vocab(fields, counters, data_type, share_vocab, vocab_size_multiple, src_vocab_size, src_words_min_frequency, tgt_vocab_size, tgt_words_min_frequency, - subword_prefix="▁", - subword_prefix_is_joiner=False): - build_fv_args = defaultdict(dict) - build_fv_args["src"] = dict( - max_size=src_vocab_size, min_freq=src_words_min_frequency) - build_fv_args["tgt"] = dict( - max_size=tgt_vocab_size, min_freq=tgt_words_min_frequency) + src_specials=None, tgt_specials=None): + src_specials = list(src_specials) if src_specials is not None else [] + tgt_specials = list(tgt_specials) if tgt_specials is not None else [] + build_fv_kwargs = defaultdict(dict) + build_fv_kwargs["src"] = dict( + max_size=src_vocab_size, min_freq=src_words_min_frequency, + specials=src_specials) + build_fv_kwargs["tgt"] = dict( + max_size=tgt_vocab_size, min_freq=tgt_words_min_frequency, + specials=tgt_specials) tgt_multifield = fields["tgt"] _build_fv_from_multifield( tgt_multifield, counters, - build_fv_args, + build_fv_kwargs, size_multiple=vocab_size_multiple if not share_vocab else 1) - if fields.get("corpus_id", False): - fields["corpus_id"].vocab = fields["corpus_id"].vocab_cls( - counters["corpus_id"]) - if data_type == 'text': src_multifield = fields["src"] _build_fv_from_multifield( src_multifield, counters, - build_fv_args, + build_fv_kwargs, size_multiple=vocab_size_multiple if not share_vocab else 1) if share_vocab: @@ -441,44 +328,17 @@ def _build_fields_vocab(fields, counters, data_type, share_vocab, logger.info(" * merging src and tgt vocab...") src_field = src_multifield.base_field tgt_field = tgt_multifield.base_field + _all_specials = [item for item in src_specials + tgt_specials] _merge_field_vocabs( src_field, tgt_field, vocab_size=src_vocab_size, min_freq=src_words_min_frequency, - vocab_size_multiple=vocab_size_multiple) + vocab_size_multiple=vocab_size_multiple, + specials=_all_specials) logger.info(" * merged vocab size: %d." 
                    % len(src_field.vocab))

-    build_noise_field(
-        src_multifield.base_field,
-        subword_prefix=subword_prefix,
-        is_joiner=subword_prefix_is_joiner)
     return fields


-def build_noise_field(src_field, subword=True,
-                      subword_prefix="▁", is_joiner=False,
-                      sentence_breaks=[".", "?", "!"]):
-    """In place add noise related fields i.e.:
-        - word_start
-        - end_of_sentence
-    """
-    if subword:
-        def is_word_start(x): return (x.startswith(subword_prefix) ^ is_joiner)
-        sentence_breaks = [subword_prefix + t for t in sentence_breaks]
-    else:
-        def is_word_start(x): return True
-
-    vocab_size = len(src_field.vocab)
-    word_start_mask = torch.zeros([vocab_size]).bool()
-    end_of_sentence_mask = torch.zeros([vocab_size]).bool()
-    for i, t in enumerate(src_field.vocab.itos):
-        if is_word_start(t):
-            word_start_mask[i] = True
-        if t in sentence_breaks:
-            end_of_sentence_mask[i] = True
-    src_field.word_start_mask = word_start_mask
-    src_field.end_of_sentence_mask = end_of_sentence_mask
-
-
 def build_vocab(train_dataset_files, fields, data_type, share_vocab,
                 src_vocab_path, src_vocab_size, src_words_min_frequency,
                 tgt_vocab_path, tgt_vocab_size, tgt_words_min_frequency,
@@ -573,16 +433,19 @@ def build_vocab(train_dataset_files, fields, data_type, share_vocab,


 def _merge_field_vocabs(src_field, tgt_field, vocab_size, min_freq,
-                        vocab_size_multiple):
+                        vocab_size_multiple, specials):
     # in the long run, shouldn't it be possible to do this by calling
     # build_vocab with both the src and tgt data?
-    specials = [tgt_field.unk_token, tgt_field.pad_token,
-                tgt_field.init_token, tgt_field.eos_token]
+    init_specials = [tgt_field.unk_token, tgt_field.pad_token,
+                     tgt_field.init_token, tgt_field.eos_token]
+    all_specials = list(OrderedDict.fromkeys(
+        tok for tok in init_specials + specials
+        if tok is not None))
     merged = sum(
         [src_field.vocab.freqs, tgt_field.vocab.freqs], Counter()
     )
     merged_vocab = Vocab(
-        merged, specials=specials,
+        merged, specials=all_specials,
         max_size=vocab_size, min_freq=min_freq
     )
     if vocab_size_multiple > 1:
@@ -597,9 +460,8 @@ def _read_vocab_file(vocab_path, tag):

     Args:
         vocab_path (str): Path to utf-8 text file containing vocabulary.
-            Each token should be on a line by itself. Tokens must not
-            contain whitespace (else only before the whitespace
-            is considered).
+            Each token should be on its own line, optionally followed by
+            a count, separated from the token by a space. No extra
+            whitespace is allowed.
         tag (str): Used for logging which vocab is being read.
     """

@@ -610,345 +472,11 @@
             "{} vocabulary not found at {}".format(tag, vocab_path))
     else:
         with codecs.open(vocab_path, 'r', 'utf-8') as f:
-            return [line.strip().split()[0] for line in f if line.strip()]
-
-
-def batch_iter(data, batch_size, batch_size_fn=None, batch_size_multiple=1):
-    """Yield elements from data in chunks of batch_size, where each chunk size
-    is a multiple of batch_size_multiple.
-
-    This is an extended version of torchtext.data.batch.
- """ - if batch_size_fn is None: - def batch_size_fn(new, count, sofar): - return count - minibatch, size_so_far = [], 0 - for ex in data: - minibatch.append(ex) - size_so_far = batch_size_fn(ex, len(minibatch), size_so_far) - if size_so_far >= batch_size: - overflowed = 0 - if size_so_far > batch_size: - overflowed += 1 - if batch_size_multiple > 1: - overflowed += ( - (len(minibatch) - overflowed) % batch_size_multiple) - if overflowed == 0: - yield minibatch - minibatch, size_so_far = [], 0 - else: - if overflowed == len(minibatch): - logger.warning( - "The batch will be filled until we reach %d," - "its size may exceed %d tokens" - % (batch_size_multiple, batch_size) - ) - else: - yield minibatch[:-overflowed] - minibatch = minibatch[-overflowed:] - size_so_far = 0 - for i, ex in enumerate(minibatch): - size_so_far = batch_size_fn(ex, i + 1, size_so_far) - if minibatch: - yield minibatch - - -def _pool(data, batch_size, batch_size_fn, batch_size_multiple, - sort_key, random_shuffler, pool_factor): - for p in torchtext.data.batch( - data, batch_size * pool_factor, - batch_size_fn=batch_size_fn): - p_batch = list(batch_iter( - sorted(p, key=sort_key), - batch_size, - batch_size_fn=batch_size_fn, - batch_size_multiple=batch_size_multiple)) - for b in random_shuffler(p_batch): - yield b - - -class OrderedIterator(torchtext.data.Iterator): - - def __init__(self, - dataset, - batch_size, - pool_factor=1, - batch_size_multiple=1, - yield_raw_example=False, - **kwargs): - super(OrderedIterator, self).__init__(dataset, batch_size, **kwargs) - self.batch_size_multiple = batch_size_multiple - self.yield_raw_example = yield_raw_example - self.dataset = dataset - self.pool_factor = pool_factor - - def create_batches(self): - if self.train: - if self.yield_raw_example: - self.batches = batch_iter( - self.data(), - 1, - batch_size_fn=None, - batch_size_multiple=1) + lines = [line.strip() for line in f if line.strip()] + first_line = lines[0].split(None, 1) + has_count = (len(first_line) == 2 and first_line[-1].isdigit()) + if has_count: + vocab = [line.split(None, 1) for line in lines] else: - self.batches = _pool( - self.data(), - self.batch_size, - self.batch_size_fn, - self.batch_size_multiple, - self.sort_key, - self.random_shuffler, - self.pool_factor) - else: - self.batches = [] - for b in batch_iter( - self.data(), - self.batch_size, - batch_size_fn=self.batch_size_fn, - batch_size_multiple=self.batch_size_multiple): - self.batches.append(sorted(b, key=self.sort_key)) - - def __iter__(self): - """ - Extended version of the definition in torchtext.data.Iterator. - Added yield_raw_example behaviour to yield a torchtext.data.Example - instead of a torchtext.data.Batch object. 
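The replacement body of `_read_vocab_file` is interleaved with the deletions above only because the diff matched a stray `else:` as shared context. Read on its own, the count-detection heuristic is compact; the following self-contained sketch mirrors it on toy input (`sketch_read_vocab` and the sample contents are illustrative, not part of this patch):

```python
from collections import Counter


def sketch_read_vocab(lines, min_freq=0):
    # Mirrors _read_vocab_file + _load_vocab from this diff: if the first
    # line has two whitespace-separated fields and the second is all
    # digits, every line is read as "<token> <count>"; otherwise counts
    # are synthesised so the counter preserves the file order.
    lines = [line.strip() for line in lines if line.strip()]
    first_line = lines[0].split(None, 1)
    has_count = len(first_line) == 2 and first_line[-1].isdigit()
    counter = Counter()
    if has_count:
        for line in lines:
            token, count = line.split(None, 1)
            counter[token] = int(count)
    else:
        for i, line in enumerate(lines):
            # decreasing values keep the original token ordering
            counter[line.split()[0]] = len(lines) - i + min_freq
    return counter, has_count


print(sketch_read_vocab(["the 1061396", "of 593677"]))  # counts kept as-is
print(sketch_read_vocab(["the", "of", "and"]))  # synthesised: 3, 2, 1
```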
- """ - while True: - self.init_epoch() - for idx, minibatch in enumerate(self.batches): - # fast-forward if loaded from state - if self._iterations_this_epoch > idx: - continue - self.iterations += 1 - self._iterations_this_epoch += 1 - if self.sort_within_batch: - # NOTE: `rnn.pack_padded_sequence` requires that a - # minibatch be sorted by decreasing order, which - # requires reversing relative to typical sort keys - if self.sort: - minibatch.reverse() - else: - minibatch.sort(key=self.sort_key, reverse=True) - if self.yield_raw_example: - yield minibatch[0] - else: - yield torchtext.data.Batch( - minibatch, - self.dataset, - self.device) - if not self.repeat: - return - - -class MultipleDatasetIterator(object): - """ - This takes a list of iterable objects (DatasetLazyIter) and their - respective weights, and yields a batch in the wanted proportions. - """ - def __init__(self, - train_shards, - fields, - device, - opt): - self.index = -1 - self.iterables = [] - self.weights = [] - for shard, weight in zip(train_shards, opt.data_weights): - if weight > 0: - self.iterables.append( - build_dataset_iter(shard, fields, opt, multi=True)) - self.weights.append(weight) - self.init_iterators = True - # self.weights = opt.data_weights - self.batch_size = opt.batch_size - self.batch_size_fn = max_tok_len \ - if opt.batch_type == "tokens" else None - if opt.batch_size_multiple is not None: - self.batch_size_multiple = opt.batch_size_multiple - else: - self.batch_size_multiple = 8 if opt.model_dtype == "fp16" else 1 - self.device = device - # Temporarily load one shard to retrieve sort_key for data_type - temp_dataset = torch.load(self.iterables[0]._paths[0]) - self.sort_key = temp_dataset.sort_key - self.random_shuffler = RandomShuffler() - self.pool_factor = opt.pool_factor - del temp_dataset - - def _iter_datasets(self): - if self.init_iterators: - self.iterators = [iter(iterable) for iterable in self.iterables] - self.init_iterators = False - for weight in self.weights: - self.index = (self.index + 1) % len(self.iterators) - for i in range(weight): - yield self.iterators[self.index] - - def _iter_examples(self): - for iterator in cycle(self._iter_datasets()): - yield next(iterator) - - def __iter__(self): - while True: - for minibatch in _pool( - self._iter_examples(), - self.batch_size, - self.batch_size_fn, - self.batch_size_multiple, - self.sort_key, - self.random_shuffler, - self.pool_factor): - minibatch = sorted(minibatch, key=self.sort_key, reverse=True) - yield torchtext.data.Batch(minibatch, - self.iterables[0].dataset, - self.device) - - -class DatasetLazyIter(object): - """Yield data from sharded dataset files. - - Args: - dataset_paths: a list containing the locations of dataset files. - fields (dict[str, Field]): fields dict for the - datasets. - batch_size (int): batch size. - batch_size_fn: custom batch process function. - device: See :class:`OrderedIterator` ``device``. - is_train (bool): train or valid? 
- """ - - def __init__(self, dataset_paths, fields, batch_size, batch_size_fn, - batch_size_multiple, device, is_train, pool_factor, - repeat=True, num_batches_multiple=1, yield_raw_example=False): - self._paths = dataset_paths - self.fields = fields - self.batch_size = batch_size - self.batch_size_fn = batch_size_fn - self.batch_size_multiple = batch_size_multiple - self.device = device - self.is_train = is_train - self.repeat = repeat - self.num_batches_multiple = num_batches_multiple - self.yield_raw_example = yield_raw_example - self.pool_factor = pool_factor - - def _iter_dataset(self, path): - logger.info('Loading dataset from %s' % path) - cur_dataset = torch.load(path) - logger.info('number of examples: %d' % len(cur_dataset)) - cur_dataset.fields = self.fields - cur_iter = OrderedIterator( - dataset=cur_dataset, - batch_size=self.batch_size, - pool_factor=self.pool_factor, - batch_size_multiple=self.batch_size_multiple, - batch_size_fn=self.batch_size_fn, - device=self.device, - train=self.is_train, - sort=False, - sort_within_batch=True, - repeat=False, - yield_raw_example=self.yield_raw_example - ) - for batch in cur_iter: - self.dataset = cur_iter.dataset - yield batch - - # NOTE: This is causing some issues for consumer/producer, - # as we may still have some of those examples in some queue - # cur_dataset.examples = None - # gc.collect() - # del cur_dataset - # gc.collect() - - def __iter__(self): - num_batches = 0 - paths = self._paths - if self.is_train and self.repeat: - # Cycle through the shards indefinitely. - paths = cycle(paths) - for path in paths: - for batch in self._iter_dataset(path): - yield batch - num_batches += 1 - if self.is_train and not self.repeat and \ - num_batches % self.num_batches_multiple != 0: - # When the dataset is not repeated, we might need to ensure that - # the number of returned batches is the multiple of a given value. - # This is important for multi GPU training to ensure that all - # workers have the same number of batches to process. - for path in paths: - for batch in self._iter_dataset(path): - yield batch - num_batches += 1 - if num_batches % self.num_batches_multiple == 0: - return - - -def max_tok_len(new, count, sofar): - """ - In token batching scheme, the number of sequences is limited - such that the total number of src/tgt tokens (including padding) - in a batch <= batch_size - """ - # Maintains the longest src and tgt length in the current batch - global max_src_in_batch, max_tgt_in_batch # this is a hack - # Reset current longest length at a new batch (count=1) - if count == 1: - max_src_in_batch = 0 - max_tgt_in_batch = 0 - # Src: [ w1 ... wN ] - max_src_in_batch = max(max_src_in_batch, len(new.src[0]) + 2) - # Tgt: [w1 ... wM ] - max_tgt_in_batch = max(max_tgt_in_batch, len(new.tgt[0]) + 1) - src_elements = count * max_src_in_batch - tgt_elements = count * max_tgt_in_batch - return max(src_elements, tgt_elements) - - -def build_dataset_iter(corpus_type, fields, opt, is_train=True, multi=False): - """ - This returns user-defined train/validate data iterator for the trainer - to iterate over. We implement simple ordered iterator strategy here, - but more sophisticated strategy like curriculum learning is ok too. - """ - dataset_glob = opt.data + '.' 
+ corpus_type + '.[0-9]*.pt' - dataset_paths = list(sorted( - glob.glob(dataset_glob), - key=lambda p: int(p.split(".")[-2]))) - - if not dataset_paths: - if is_train: - raise ValueError('Training data %s not found' % dataset_glob) - else: - return None - if multi: - batch_size = 1 - batch_fn = None - batch_size_multiple = 1 - else: - batch_size = opt.batch_size if is_train else opt.valid_batch_size - batch_fn = max_tok_len \ - if is_train and opt.batch_type == "tokens" else None - batch_size_multiple = 8 if opt.model_dtype == "fp16" else 1 - - device = "cpu" - - return DatasetLazyIter( - dataset_paths, - fields, - batch_size, - batch_fn, - batch_size_multiple, - device, - is_train, - opt.pool_factor, - repeat=not opt.single_pass, - num_batches_multiple=max(opt.accum_count) * opt.world_size, - yield_raw_example=multi) - - -def build_dataset_iter_multiple(train_shards, fields, opt): - return MultipleDatasetIterator( - train_shards, fields, "cpu", opt) + vocab = [line.strip().split()[0] for line in lines] + return vocab, has_count diff --git a/onmt/inputters/iterator.py b/onmt/inputters/iterator.py new file mode 100644 index 0000000000..6f2900c1b4 --- /dev/null +++ b/onmt/inputters/iterator.py @@ -0,0 +1,153 @@ +"""Contains all methods relate to iteration.""" +import torchtext.data + +from onmt.utils.logging import logger + + +def batch_iter(data, batch_size, batch_size_fn=None, batch_size_multiple=1): + """Yield elements from data in chunks of batch_size, where each chunk size + is a multiple of batch_size_multiple. + + This is an extended version of torchtext.data.batch. + """ + if batch_size_fn is None: + def batch_size_fn(new, count, sofar): + return count + minibatch, size_so_far = [], 0 + for ex in data: + minibatch.append(ex) + size_so_far = batch_size_fn(ex, len(minibatch), size_so_far) + if size_so_far >= batch_size: + overflowed = 0 + if size_so_far > batch_size: + overflowed += 1 + if batch_size_multiple > 1: + overflowed += ( + (len(minibatch) - overflowed) % batch_size_multiple) + if overflowed == 0: + yield minibatch + minibatch, size_so_far = [], 0 + else: + if overflowed == len(minibatch): + logger.warning( + "The batch will be filled until we reach %d," + "its size may exceed %d tokens" + % (batch_size_multiple, batch_size) + ) + else: + yield minibatch[:-overflowed] + minibatch = minibatch[-overflowed:] + size_so_far = 0 + for i, ex in enumerate(minibatch): + size_so_far = batch_size_fn(ex, i + 1, size_so_far) + if minibatch: + yield minibatch + + +def _pool(data, batch_size, batch_size_fn, batch_size_multiple, + sort_key, random_shuffler, pool_factor): + for p in torchtext.data.batch( + data, batch_size * pool_factor, + batch_size_fn=batch_size_fn): + p_batch = list(batch_iter( + sorted(p, key=sort_key), + batch_size, + batch_size_fn=batch_size_fn, + batch_size_multiple=batch_size_multiple)) + for b in random_shuffler(p_batch): + yield b + + +class OrderedIterator(torchtext.data.Iterator): + + def __init__(self, + dataset, + batch_size, + pool_factor=1, + batch_size_multiple=1, + yield_raw_example=False, + **kwargs): + super(OrderedIterator, self).__init__(dataset, batch_size, **kwargs) + self.batch_size_multiple = batch_size_multiple + self.yield_raw_example = yield_raw_example + self.dataset = dataset + self.pool_factor = pool_factor + + def create_batches(self): + if self.train: + if self.yield_raw_example: + self.batches = batch_iter( + self.data(), + 1, + batch_size_fn=None, + batch_size_multiple=1) + else: + self.batches = _pool( + self.data(), + self.batch_size, + 
self.batch_size_fn, + self.batch_size_multiple, + self.sort_key, + self.random_shuffler, + self.pool_factor) + else: + self.batches = [] + for b in batch_iter( + self.data(), + self.batch_size, + batch_size_fn=self.batch_size_fn, + batch_size_multiple=self.batch_size_multiple): + self.batches.append(sorted(b, key=self.sort_key)) + + def __iter__(self): + """ + Extended version of the definition in torchtext.data.Iterator. + Added yield_raw_example behaviour to yield a torchtext.data.Example + instead of a torchtext.data.Batch object. + """ + while True: + self.init_epoch() + for idx, minibatch in enumerate(self.batches): + # fast-forward if loaded from state + if self._iterations_this_epoch > idx: + continue + self.iterations += 1 + self._iterations_this_epoch += 1 + if self.sort_within_batch: + # NOTE: `rnn.pack_padded_sequence` requires that a + # minibatch be sorted by decreasing order, which + # requires reversing relative to typical sort keys + if self.sort: + minibatch.reverse() + else: + minibatch.sort(key=self.sort_key, reverse=True) + if self.yield_raw_example: + yield minibatch[0] + else: + yield torchtext.data.Batch( + minibatch, + self.dataset, + self.device) + if not self.repeat: + return + + +def max_tok_len(new, count, sofar): + """ + In token batching scheme, the number of sequences is limited + such that the total number of src/tgt tokens (including padding) + in a batch <= batch_size + """ + # Maintains the longest src and tgt length in the current batch + global max_src_in_batch, max_tgt_in_batch # this is a hack + # Reset current longest length at a new batch (count=1) + if count == 1: + max_src_in_batch = 0 + max_tgt_in_batch = 0 + # Src: [ w1 ... wN ] + max_src_in_batch = max(max_src_in_batch, len(new.src[0]) + 2) + # Tgt: [w1 ... wM ] + max_tgt_in_batch = max(max_tgt_in_batch, len(new.tgt[0]) + 1) + src_elements = count * max_src_in_batch + tgt_elements = count * max_tgt_in_batch + return max(src_elements, tgt_elements) diff --git a/onmt/inputters/text_dataset.py b/onmt/inputters/text_dataset.py index 86e8665f7e..5ab9260626 100644 --- a/onmt/inputters/text_dataset.py +++ b/onmt/inputters/text_dataset.py @@ -5,11 +5,12 @@ import torch from torchtext.data import Field, RawField +from onmt.constants import DefaultTokens from onmt.inputters.datareader_base import DataReaderBase class TextDataReader(DataReaderBase): - def read(self, sequences, side, _dir=None): + def read(self, sequences, side): """Read text data from disk. Args: @@ -17,16 +18,12 @@ def read(self, sequences, side, _dir=None): path to text file or iterable of the actual text data. side (str): Prefix used in return dict. Usually ``"src"`` or ``"tgt"``. - _dir (NoneType): Leave as ``None``. This parameter exists to - conform with the :func:`DataReaderBase.read()` signature. Yields: dictionaries whose keys are the names of fields and whose values are more or less the result of tokenizing with those fields. """ - assert _dir is None or _dir == "", \ - "Cannot use _dir with TextDataReader." 
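With `batch_iter` and `max_tok_len` extracted into their own module, the batching arithmetic can be exercised directly. A toy run (the `SimpleNamespace` stand-ins assume the `(tokens, ...)` shape that text examples carry; the import path is the one introduced by this diff):

```python
from types import SimpleNamespace

from onmt.inputters.iterator import batch_iter, max_tok_len

# Token batching: a batch's "size" is its example count times the running
# max of padded src/tgt lengths (+2 on src, +1 on tgt for the special
# tokens), whichever side is larger.
ex1 = SimpleNamespace(src=(["w"] * 8, None), tgt=(["w"] * 5, None))
ex2 = SimpleNamespace(src=(["w"] * 12, None), tgt=(["w"] * 4, None))
print(max_tok_len(ex1, 1, 0))   # max(1 * (8 + 2), 1 * (5 + 1)) -> 10
print(max_tok_len(ex2, 2, 10))  # max(2 * (12 + 2), 2 * 6)      -> 28

# Multiple-trimming: an overflowing chunk is cut back to a multiple of
# batch_size_multiple and the remainder carried over to the next batch.
print([len(b) for b in batch_iter(range(11), 5, batch_size_multiple=2)])
# -> [4, 4, 3]
```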
if isinstance(sequences, str): sequences = DataReaderBase._read_file(sequences) for i, seq in enumerate(sequences): @@ -170,9 +167,9 @@ def text_fields(**kwargs): n_feats = kwargs["n_feats"] include_lengths = kwargs["include_lengths"] base_name = kwargs["base_name"] - pad = kwargs.get("pad", "") - bos = kwargs.get("bos", "") - eos = kwargs.get("eos", "") + pad = kwargs.get("pad", DefaultTokens.PAD) + bos = kwargs.get("bos", DefaultTokens.BOS) + eos = kwargs.get("eos", DefaultTokens.EOS) truncate = kwargs.get("truncate", None) fields_ = [] feat_delim = u"│" if n_feats > 0 else None diff --git a/onmt/inputters/vec_dataset.py b/onmt/inputters/vec_dataset.py deleted file mode 100644 index 3ab6454fc5..0000000000 --- a/onmt/inputters/vec_dataset.py +++ /dev/null @@ -1,149 +0,0 @@ -import os - -import torch -from torchtext.data import Field - -from onmt.inputters.datareader_base import DataReaderBase - -try: - import numpy as np -except ImportError: - np = None - - -class VecDataReader(DataReaderBase): - """Read feature vector data from disk. - Raises: - onmt.inputters.datareader_base.MissingDependencyException: If - importing ``np`` fails. - """ - - def __init__(self): - self._check_deps() - - @classmethod - def _check_deps(cls): - if np is None: - cls._raise_missing_dep("np") - - def read(self, vecs, side, vec_dir=None): - """Read data into dicts. - Args: - vecs (str or Iterable[str]): Sequence of feature vector paths or - path to file containing feature vector paths. - In either case, the filenames may be relative to ``vec_dir`` - (default behavior) or absolute. - side (str): Prefix used in return dict. Usually - ``"src"`` or ``"tgt"``. - vec_dir (str): Location of source vectors. See ``vecs``. - Yields: - A dictionary containing feature vector data. - """ - - if isinstance(vecs, str): - vecs = DataReaderBase._read_file(vecs) - - for i, filename in enumerate(vecs): - filename = filename.decode("utf-8").strip() - vec_path = os.path.join(vec_dir, filename) - if not os.path.exists(vec_path): - vec_path = filename - - assert os.path.exists(vec_path), \ - 'vec path %s not found' % filename - - vec = np.load(vec_path) - yield {side: torch.from_numpy(vec), - side + "_path": filename, "indices": i} - - -def vec_sort_key(ex): - """Sort using the length of the vector sequence.""" - return ex.src.shape[0] - - -class VecSeqField(Field): - """Defines an vector datatype and instructions for converting to Tensor. - See :class:`Fields` for attribute descriptions. - """ - - def __init__(self, preprocessing=None, postprocessing=None, - include_lengths=False, batch_first=False, pad_index=0, - is_target=False): - super(VecSeqField, self).__init__( - sequential=True, use_vocab=False, init_token=None, - eos_token=None, fix_length=False, dtype=torch.float, - preprocessing=preprocessing, postprocessing=postprocessing, - lower=False, tokenize=None, include_lengths=include_lengths, - batch_first=batch_first, pad_token=pad_index, unk_token=None, - pad_first=False, truncate_first=False, stop_words=None, - is_target=is_target - ) - - def pad(self, minibatch): - """Pad a batch of examples to the length of the longest example. - Args: - minibatch (List[torch.FloatTensor]): A list of audio data, - each having shape ``(len, n_feats, feat_dim)`` - where len is variable. - Returns: - torch.FloatTensor or Tuple[torch.FloatTensor, List[int]]: The - padded tensor of shape - ``(batch_size, max_len, n_feats, feat_dim)``. - and a list of the lengths if `self.include_lengths` is `True` - else just returns the padded tensor. 
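With the hard-coded pad/bos/eos strings removed from `text_fields`, any caller that omits them now inherits the shared constants. A quick check of that default path (the printed values assume `DefaultTokens` keeps the usual `<blank>`/`<s>`/`</s>` strings):

```python
from onmt.constants import DefaultTokens
from onmt.inputters.text_dataset import text_fields

# No explicit pad/bos/eos kwargs: the DefaultTokens fallbacks apply.
field = text_fields(base_name="src", n_feats=0, include_lengths=True)
base = field.base_field  # TextMultiField wraps the word-level Field
print(base.pad_token == DefaultTokens.PAD)  # True
print(DefaultTokens.PAD, DefaultTokens.BOS, DefaultTokens.EOS)
```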
- """ - - assert not self.pad_first and not self.truncate_first \ - and not self.fix_length and self.sequential - minibatch = list(minibatch) - lengths = [x.size(0) for x in minibatch] - max_len = max(lengths) - nfeats = minibatch[0].size(1) - feat_dim = minibatch[0].size(2) - feats = torch.full((len(minibatch), max_len, nfeats, feat_dim), - self.pad_token, dtype=self.dtype) - for i, (feat, len_) in enumerate(zip(minibatch, lengths)): - feats[i, 0:len_, :, :] = feat - if self.include_lengths: - return (feats, lengths) - return feats - - def numericalize(self, arr, device=None): - """Turn a batch of examples that use this field into a Variable. - If the field has ``include_lengths=True``, a tensor of lengths will be - included in the return value. - Args: - arr (torch.FloatTensor or Tuple(torch.FloatTensor, List[int])): - List of tokenized and padded examples, or tuple of List of - tokenized and padded examples and List of lengths of each - example if self.include_lengths is True. - device (str or torch.device): See `Field.numericalize`. - """ - - assert self.use_vocab is False - if self.include_lengths and not isinstance(arr, tuple): - raise ValueError("Field has include_lengths set to True, but " - "input data is not a tuple of " - "(data batch, batch lengths).") - if isinstance(arr, tuple): - arr, lengths = arr - lengths = torch.tensor(lengths, dtype=torch.int, device=device) - arr = arr.to(device) - - if self.postprocessing is not None: - arr = self.postprocessing(arr, None) - - if self.sequential and not self.batch_first: - arr = arr.permute(1, 0, 2, 3) - if self.sequential: - arr = arr.contiguous() - - if self.include_lengths: - return arr, lengths - return arr - - -def vec_fields(**kwargs): - vec = VecSeqField(pad_index=0, include_lengths=True) - return vec diff --git a/onmt/model_builder.py b/onmt/model_builder.py index 43cd9731c8..5ad9333b86 100644 --- a/onmt/model_builder.py +++ b/onmt/model_builder.py @@ -7,13 +7,12 @@ import torch.nn as nn from torch.nn.init import xavier_uniform_ -import onmt.inputters as inputters import onmt.modules from onmt.encoders import str2enc from onmt.decoders import str2dec -from onmt.modules import Embeddings, VecEmbedding, CopyGenerator +from onmt.modules import Embeddings, CopyGenerator from onmt.modules.util_class import Cast from onmt.utils.misc import use_gpu from onmt.utils.logging import logger @@ -29,15 +28,6 @@ def build_embeddings(opt, text_field, for_encoder=True): """ emb_dim = opt.src_word_vec_size if for_encoder else opt.tgt_word_vec_size - if opt.model_type == "vec" and for_encoder: - return VecEmbedding( - opt.feat_vec_size, - emb_dim, - position_encoding=opt.position_encoding, - dropout=(opt.dropout[0] if type(opt.dropout) is list - else opt.dropout), - ) - pad_indices = [f.vocab.stoi[f.pad_token] for _, f in text_field] word_padding_idx, feat_pad_indices = pad_indices[0], pad_indices[1:] @@ -71,8 +61,7 @@ def build_encoder(opt, embeddings): opt: the option in current environment. embeddings (Embeddings): vocab embeddings for this encoder. 
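After this simplification, `build_encoder` keys the encoder registry purely off `-encoder_type` whenever the model type is text; a sketch of the dispatch (values illustrative):

```python
from onmt.encoders import str2enc

# "text" is the only model type left, so the registry key is simply the
# configured encoder type, e.g. "rnn", "brnn", "cnn", "transformer", "ggnn".
model_type, encoder_type = "text", "transformer"
enc_type = encoder_type if model_type == "text" else model_type
print(str2enc[enc_type].__name__)  # TransformerEncoder
```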
""" - enc_type = opt.encoder_type if opt.model_type == "text" \ - or opt.model_type == "vec" else opt.model_type + enc_type = opt.encoder_type if opt.model_type == "text" else opt.model_type return str2enc[enc_type].from_opt(opt, embeddings) @@ -97,13 +86,7 @@ def load_test_model(opt, model_path=None): model_opt = ArgumentParser.ckpt_model_opts(checkpoint['opt']) ArgumentParser.update_model_opts(model_opt) ArgumentParser.validate_model_opts(model_opt) - vocab = checkpoint['vocab'] - if inputters.old_style_vocab(vocab): - fields = inputters.load_old_vocab( - vocab, opt.data_type, dynamic_dict=model_opt.copy_attn - ) - else: - fields = vocab + fields = checkpoint['vocab'] model = build_base_model(model_opt, fields, use_gpu(opt), checkpoint, opt.gpu) @@ -139,7 +122,7 @@ def build_base_model(model_opt, fields, gpu, checkpoint=None, gpu_id=None): model_opt.attention_dropout = model_opt.dropout # Build embeddings. - if model_opt.model_type == "text" or model_opt.model_type == "vec": + if model_opt.model_type == "text": src_field = fields["src"] src_emb = build_embeddings(model_opt, src_field) else: diff --git a/onmt/models/model.py b/onmt/models/model.py index 920adcc981..10422834ea 100644 --- a/onmt/models/model.py +++ b/onmt/models/model.py @@ -54,3 +54,24 @@ def forward(self, src, tgt, lengths, bptt=False, with_align=False): def update_dropout(self, dropout): self.encoder.update_dropout(dropout) self.decoder.update_dropout(dropout) + + def count_parameters(self, log=print): + """Count number of parameters in model (& print with `log` callback). + + Returns: + (int, int): + * encoder side parameter count + * decoder side parameter count + """ + + enc, dec = 0, 0 + for name, param in self.named_parameters(): + if 'encoder' in name: + enc += param.nelement() + else: + dec += param.nelement() + if callable(log): + log('encoder: {}'.format(enc)) + log('decoder: {}'.format(dec)) + log('* number of parameters: {}'.format(enc + dec)) + return enc, dec diff --git a/onmt/models/model_saver.py b/onmt/models/model_saver.py index fa1c890006..86a278554e 100644 --- a/onmt/models/model_saver.py +++ b/onmt/models/model_saver.py @@ -8,6 +8,10 @@ def build_model_saver(model_opt, opt, model, fields, optim): + # _check_save_model_path + save_model_path = os.path.abspath(opt.save_model) + os.makedirs(os.path.dirname(save_model_path), exist_ok=True) + model_saver = ModelSaver(opt.save_model, model, model_opt, @@ -17,6 +21,16 @@ def build_model_saver(model_opt, opt, model, fields, optim): return model_saver +def load_checkpoint(ckpt_path): + """Load checkpoint from `ckpt_path` if any else return `None`.""" + checkpoint = None + if ckpt_path: + logger.info('Loading checkpoint from %s' % ckpt_path) + checkpoint = torch.load(ckpt_path, + map_location=lambda storage, loc: storage) + return checkpoint + + class ModelSaverBase(object): """Base class for model saving operations @@ -68,11 +82,12 @@ def save(self, step, moving_average=None): self._rm_checkpoint(todel) self.checkpoint_queue.append(chkpt_name) - def _save(self, step): + def _save(self, step, model): """Save a resumable checkpoint. 
Args: step (int): step number + model (nn.Module): torch model to save Returns: (object, str): diff --git a/onmt/modules/__init__.py b/onmt/modules/__init__.py index 22a4ecf32f..38ff142b47 100644 --- a/onmt/modules/__init__.py +++ b/onmt/modules/__init__.py @@ -6,15 +6,12 @@ from onmt.modules.copy_generator import CopyGenerator, CopyGeneratorLoss, \ CopyGeneratorLossCompute from onmt.modules.multi_headed_attn import MultiHeadedAttention -from onmt.modules.embeddings import Embeddings, PositionalEncoding, \ - VecEmbedding +from onmt.modules.embeddings import Embeddings, PositionalEncoding from onmt.modules.weight_norm import WeightNormConv2d from onmt.modules.average_attn import AverageAttention -import onmt.modules.source_noise # noqa - __all__ = ["Elementwise", "context_gate_factory", "ContextGate", "GlobalAttention", "ConvMultiStepAttention", "CopyGenerator", "CopyGeneratorLoss", "CopyGeneratorLossCompute", "MultiHeadedAttention", "Embeddings", "PositionalEncoding", - "WeightNormConv2d", "AverageAttention", "VecEmbedding"] + "WeightNormConv2d", "AverageAttention"] diff --git a/onmt/modules/embeddings.py b/onmt/modules/embeddings.py index 72d112c18a..d46972e78f 100644 --- a/onmt/modules/embeddings.py +++ b/onmt/modules/embeddings.py @@ -1,4 +1,5 @@ """ Embeddings module """ +import six import math import warnings @@ -6,6 +7,7 @@ import torch.nn as nn from onmt.modules.util_class import Elementwise +from onmt.utils.logging import logger class PositionalEncoding(nn.Module): @@ -54,38 +56,6 @@ def forward(self, emb, step=None): return emb -class VecEmbedding(nn.Module): - def __init__(self, vec_size, - emb_dim, - position_encoding=False, - dropout=0): - super(VecEmbedding, self).__init__() - self.embedding_size = emb_dim - self.proj = nn.Linear(vec_size, emb_dim, bias=False) - self.word_padding_idx = 0 # vector seqs are zero-padded - self.position_encoding = position_encoding - - if self.position_encoding: - self.pe = PositionalEncoding(dropout, self.embedding_size) - - def forward(self, x, step=None): - """ - Args: - x (FloatTensor): input, ``(len, batch, 1, vec_feats)``. - - Returns: - FloatTensor: embedded vecs ``(len, batch, embedding_size)``. - """ - x = self.proj(x).squeeze(2) - if self.position_encoding: - x = self.pe(x, step=step) - - return x - - def load_pretrained_vectors(self, file): - assert not file - - class Embeddings(nn.Module): """Words embeddings for encoder/decoder. @@ -281,3 +251,130 @@ def forward(self, source, step=None): def update_dropout(self, dropout): if self.position_encoding: self._modules['make_embedding'][1].dropout.p = dropout + + +# Some utilitary functions for pretrained embeddings + +def read_embeddings(path, skip_lines=0, filter_set=None): + """ + Read an embeddings file in the glove format. + """ + embs = dict() + total_vectors_in_file = 0 + with open(path, 'rb') as f: + for i, line in enumerate(f): + if i < skip_lines: + continue + if not line: + break + if len(line) == 0: + # is this reachable? 
+ continue + + l_split = line.decode('utf8').strip().split(' ') + if len(l_split) == 2: + continue + total_vectors_in_file += 1 + if filter_set is not None and l_split[0] not in filter_set: + continue + embs[l_split[0]] = [float(em) for em in l_split[1:]] + return embs, total_vectors_in_file + + +def calc_vocab_load_stats(vocab, loaded_embed_dict): + matching_count = len( + set(vocab.stoi.keys()) & set(loaded_embed_dict.keys())) + missing_count = len(vocab) - matching_count + percent_matching = matching_count / len(vocab) * 100 + return matching_count, missing_count, percent_matching + + +def convert_to_torch_tensor(word_to_float_list_dict, vocab): + dim = len(six.next(six.itervalues(word_to_float_list_dict))) + tensor = torch.zeros((len(vocab), dim)) + for word, values in word_to_float_list_dict.items(): + tensor[vocab.stoi[word]] = torch.Tensor(values) + return tensor + + +def prepare_pretrained_embeddings(opt, fields): + if all([opt.both_embeddings is None, + opt.src_embeddings is None, + opt.tgt_embeddings is None]): + return + + assert opt.save_data, "-save_data is required when using \ + pretrained embeddings." + + vocs = [] + for side in ['src', 'tgt']: + try: + vocab = fields[side].base_field.vocab + except AttributeError: + vocab = fields[side].vocab + vocs.append(vocab) + enc_vocab, dec_vocab = vocs + + skip_lines = 1 if opt.embeddings_type == "word2vec" else 0 + if opt.both_embeddings is not None: + set_of_src_and_tgt_vocab = \ + set(enc_vocab.stoi.keys()) | set(dec_vocab.stoi.keys()) + logger.info("Reading encoder and decoder embeddings from {}".format( + opt.both_embeddings)) + src_vectors, total_vec_count = \ + read_embeddings(opt.both_embeddings, skip_lines, + set_of_src_and_tgt_vocab) + tgt_vectors = src_vectors + logger.info("\tFound {} total vectors in file".format(total_vec_count)) + else: + if opt.src_embeddings is not None: + logger.info("Reading encoder embeddings from {}".format( + opt.src_embeddings)) + src_vectors, total_vec_count = read_embeddings( + opt.src_embeddings, skip_lines, + filter_set=enc_vocab.stoi + ) + logger.info("\tFound {} total vectors in file.".format( + total_vec_count)) + else: + src_vectors = None + if opt.tgt_embeddings is not None: + logger.info("Reading decoder embeddings from {}".format( + opt.tgt_embeddings)) + tgt_vectors, total_vec_count = read_embeddings( + opt.tgt_embeddings, skip_lines, + filter_set=dec_vocab.stoi + ) + logger.info( + "\tFound {} total vectors in file".format(total_vec_count)) + else: + tgt_vectors = None + logger.info("After filtering to vectors in vocab:") + if opt.src_embeddings is not None or opt.both_embeddings is not None: + logger.info("\t* enc: %d match, %d missing, (%.2f%%)" + % calc_vocab_load_stats(enc_vocab, src_vectors)) + if opt.tgt_embeddings is not None or opt.both_embeddings is not None: + logger.info("\t* dec: %d match, %d missing, (%.2f%%)" + % calc_vocab_load_stats(dec_vocab, tgt_vectors)) + + # Write to file + enc_output_file = opt.save_data + ".enc_embeddings.pt" + dec_output_file = opt.save_data + ".dec_embeddings.pt" + if opt.src_embeddings is not None or opt.both_embeddings is not None: + logger.info("\nSaving encoder embeddings as:\n\t* enc: %s" + % enc_output_file) + torch.save( + convert_to_torch_tensor(src_vectors, enc_vocab), + enc_output_file + ) + # set the opt in place + opt.pre_word_vecs_enc = enc_output_file + if opt.tgt_embeddings is not None or opt.both_embeddings is not None: + logger.info("\nSaving decoder embeddings as:\n\t* dec: %s" + % dec_output_file) + torch.save( + 
convert_to_torch_tensor(tgt_vectors, dec_vocab), + dec_output_file + ) + # set the opt in place + opt.pre_word_vecs_dec = dec_output_file diff --git a/onmt/modules/source_noise.py b/onmt/modules/source_noise.py deleted file mode 100644 index 4038129684..0000000000 --- a/onmt/modules/source_noise.py +++ /dev/null @@ -1,351 +0,0 @@ -import math -import torch - - -def aeq(ref, *args): - for i, e in enumerate(args): - assert ref == e, "%s != %s (element %d)" % (str(ref), str(e), i) - - -class NoiseBase(object): - def __init__(self, prob, pad_idx=1, device_id="cpu", - ids_to_noise=[], **kwargs): - self.prob = prob - self.pad_idx = 1 - self.skip_first = 1 - self.device_id = device_id - self.ids_to_noise = set([t.item() for t in ids_to_noise]) - - def __call__(self, batch): - return self.noise_batch(batch) - - def to_device(self, t): - return t.to(torch.device(self.device_id)) - - def noise_batch(self, batch): - source, lengths = batch.src if isinstance(batch.src, tuple) \ - else (batch.src, [None] * batch.src.size(1)) - # noise_skip = batch.noise_skip - # aeq(len(batch.noise_skip) == source.size(1)) - - # source is [src_len x bs x feats] - skipped = source[:self.skip_first, :, :] - source = source[self.skip_first:] - for i in range(source.size(1)): - if hasattr(batch, 'corpus_id'): - corpus_id = batch.corpus_id[i] - if corpus_id.item() not in self.ids_to_noise: - continue - tokens = source[:, i, 0] - mask = tokens.ne(self.pad_idx) - - masked_tokens = tokens[mask] - noisy_tokens, length = self.noise_source( - masked_tokens, length=lengths[i]) - - lengths[i] = length - - # source might increase length so we need to resize the whole - # tensor - delta = length - (source.size(0) - self.skip_first) - if delta > 0: - pad = torch.ones([delta], - device=source.device, - dtype=source.dtype) - pad *= self.pad_idx - pad = pad.unsqueeze(1).expand(-1, 15).unsqueeze(2) - - source = torch.cat([source, source]) - source[:noisy_tokens.size(0), i, 0] = noisy_tokens - - source = torch.cat([skipped, source]) - - # remove useless pad - max_len = lengths.max() - source = source[:max_len, :, :] - - batch.src = source, lengths - return batch - - def noise_source(self, source, **kwargs): - raise NotImplementedError() - - -class MaskNoise(NoiseBase): - def noise_batch(self, batch): - raise ValueError("MaskNoise has not been updated to tensor noise") - # def s(self, tokens): - # prob = self.prob - # r = torch.rand([len(tokens)]) - # mask = False - # masked = [] - # for i, tok in enumerate(tokens): - # if tok.startswith(subword_prefix): - # if r[i].item() <= prob: - # masked.append(mask_tok) - # mask = True - # else: - # masked.append(tok) - # mask = False - # else: - # if mask: - # pass - # else: - # masked.append(tok) - # return masked - - -class SenShufflingNoise(NoiseBase): - def __init__(self, *args, end_of_sentence_mask=None, **kwargs): - super(SenShufflingNoise, self).__init__(*args, **kwargs) - assert end_of_sentence_mask is not None - self.end_of_sentence_mask = self.to_device(end_of_sentence_mask) - - def is_end_of_sentence(self, source): - return self.end_of_sentence_mask.gather(0, source) - - def noise_source(self, source, length=None, **kwargs): - # aeq(source.size(0), length) - full_stops = self.is_end_of_sentence(source) - # Pretend it ends with a full stop so last span is a sentence - full_stops[-1] = 1 - - # Tokens that are full stops, where the previous token is not - sentence_ends = (full_stops[1:] * ~full_stops[:-1]).nonzero( - as_tuple=False) + 2 - result = source.clone() - - num_sentences = 
sentence_ends.size(0) - num_to_permute = math.ceil((num_sentences * 2 * self.prob) / 2.0) - substitutions = torch.randperm(num_sentences)[:num_to_permute] - ordering = torch.arange(0, num_sentences) - ordering[substitutions] = substitutions[torch.randperm(num_to_permute)] - - index = 0 - for i in ordering: - sentence = source[(sentence_ends[i - 1] if i > - 0 else 1):sentence_ends[i]] - result[index:index + sentence.size(0)] = sentence - index += sentence.size(0) - # aeq(source.size(0), length) - return result, length - - -class InfillingNoise(NoiseBase): - def __init__(self, *args, infilling_poisson_lambda=3.0, - word_start_mask=None, **kwargs): - super(InfillingNoise, self).__init__(*args, **kwargs) - self.poisson_lambda = infilling_poisson_lambda - self.mask_span_distribution = self._make_poisson(self.poisson_lambda) - self.mask_idx = 0 - assert word_start_mask is not None - self.word_start_mask = self.to_device(word_start_mask) - - # -1: keep everything (i.e. 1 mask per token) - # 0: replace everything (i.e. no mask) - # 1: 1 mask per span - self.replace_length = 1 - - def _make_poisson(self, poisson_lambda): - # fairseq/data/denoising_dataset.py - _lambda = poisson_lambda - - lambda_to_the_k = 1 - e_to_the_minus_lambda = math.exp(-_lambda) - k_factorial = 1 - ps = [] - for k in range(0, 128): - ps.append(e_to_the_minus_lambda * lambda_to_the_k / k_factorial) - lambda_to_the_k *= _lambda - k_factorial *= (k + 1) - if ps[-1] < 0.0000001: - break - ps = torch.tensor(ps, device=torch.device(self.device_id)) - return torch.distributions.Categorical(ps) - - def is_word_start(self, source): - # print("src size: ", source.size()) - # print("ws size: ", self.word_start_mask.size()) - # print("max: ", source.max()) - # assert source.max() < self.word_start_mask.size(0) - # assert source.min() >= 0 - return self.word_start_mask.gather(0, source) - - def noise_source(self, source, **kwargs): - - is_word_start = self.is_word_start(source) - # assert source.size() == is_word_start.size() - # aeq(source.eq(self.pad_idx).long().sum(), 0) - - # we manually add this hypothesis since it's required for the rest - # of the function and kindof make sense - is_word_start[-1] = 0 - - p = self.prob - num_to_mask = (is_word_start.float().sum() * p).ceil().long() - num_inserts = 0 - if num_to_mask == 0: - return source - - if self.mask_span_distribution is not None: - lengths = self.mask_span_distribution.sample( - sample_shape=(num_to_mask,)) - - # Make sure we have enough to mask - cum_length = torch.cumsum(lengths, 0) - while cum_length[-1] < num_to_mask: - lengths = torch.cat([ - lengths, - self.mask_span_distribution.sample( - sample_shape=(num_to_mask,)) - ], dim=0) - cum_length = torch.cumsum(lengths, 0) - - # Trim to masking budget - i = 0 - while cum_length[i] < num_to_mask: - i += 1 - lengths[i] = num_to_mask - (0 if i == 0 else cum_length[i - 1]) - num_to_mask = i + 1 - lengths = lengths[:num_to_mask] - - # Handle 0-length mask (inserts) separately - lengths = lengths[lengths > 0] - num_inserts = num_to_mask - lengths.size(0) - num_to_mask -= num_inserts - if num_to_mask == 0: - return self.add_insertion_noise( - source, num_inserts / source.size(0)) - # assert (lengths > 0).all() - else: - raise ValueError("Not supposed to be there") - lengths = torch.ones((num_to_mask,), device=source.device).long() - # assert is_word_start[-1] == 0 - word_starts = is_word_start.nonzero(as_tuple=False) - indices = word_starts[torch.randperm(word_starts.size(0))[ - :num_to_mask]].squeeze(1) - - source_length = 
source.size(0) - # TODO why? - # assert source_length - 1 not in indices - to_keep = torch.ones( - source_length, - dtype=torch.bool, - device=source.device) - - is_word_start = is_word_start.long() - # acts as a long length, so spans don't go over the end of doc - is_word_start[-1] = 10e5 - if self.replace_length == 0: - to_keep[indices] = 0 - else: - # keep index, but replace it with [MASK] - source[indices] = self.mask_idx - # random ratio disabled - # source[indices[mask_random]] = torch.randint( - # 1, len(self.vocab), size=(mask_random.sum(),)) - - # if self.mask_span_distribution is not None: - # assert len(lengths.size()) == 1 - # assert lengths.size() == indices.size() - lengths -= 1 - while indices.size(0) > 0: - # assert lengths.size() == indices.size() - lengths -= is_word_start[indices + 1].long() - uncompleted = lengths >= 0 - indices = indices[uncompleted] + 1 - - # mask_random = mask_random[uncompleted] - lengths = lengths[uncompleted] - if self.replace_length != -1: - # delete token - to_keep[indices] = 0 - else: - # keep index, but replace it with [MASK] - source[indices] = self.mask_idx - # random ratio disabled - # source[indices[mask_random]] = torch.randint( - # 1, len(self.vocab), size=(mask_random.sum(),)) - # else: - # # A bit faster when all lengths are 1 - # while indices.size(0) > 0: - # uncompleted = is_word_start[indices + 1] == 0 - # indices = indices[uncompleted] + 1 - # mask_random = mask_random[uncompleted] - # if self.replace_length != -1: - # # delete token - # to_keep[indices] = 0 - # else: - # # keep index, but replace it with [MASK] - # source[indices] = self.mask_idx - # source[indices[mask_random]] = torch.randint( - # 1, len(self.vocab), size=(mask_random.sum(),)) - - # assert source_length - 1 not in indices - - source = source[to_keep] - - if num_inserts > 0: - source = self.add_insertion_noise( - source, num_inserts / source.size(0)) - - # aeq(source.eq(self.pad_idx).long().sum(), 0) - final_length = source.size(0) - return source, final_length - - def add_insertion_noise(self, tokens, p): - if p == 0.0: - return tokens - - num_tokens = tokens.size(0) - n = int(math.ceil(num_tokens * p)) - - noise_indices = torch.randperm(num_tokens + n - 2)[:n] + 1 - noise_mask = torch.zeros( - size=( - num_tokens + n, - ), - dtype=torch.bool, - device=tokens.device) - noise_mask[noise_indices] = 1 - result = torch.ones([n + len(tokens)], - dtype=torch.long, - device=tokens.device) * -1 - - # random ratio disabled - # num_random = int(math.ceil(n * self.random_ratio)) - result[noise_indices] = self.mask_idx - # result[noise_indices[:num_random]] = torch.randint( - # low=1, high=len(self.vocab), size=(num_random,)) - - result[~noise_mask] = tokens - - # assert (result >= 0).all() - return result - - -class MultiNoise(NoiseBase): - NOISES = { - "sen_shuffling": SenShufflingNoise, - "infilling": InfillingNoise, - "mask": MaskNoise - } - - def __init__(self, noises=[], probs=[], **kwargs): - assert len(noises) == len(probs) - super(MultiNoise, self).__init__(probs, **kwargs) - - self.noises = [] - for i, n in enumerate(noises): - cls = MultiNoise.NOISES.get(n) - if n is None: - raise ValueError("Unknown noise function '%s'" % n) - else: - noise = cls(probs[i], **kwargs) - self.noises.append(noise) - - def noise_source(self, source, length=None, **kwargs): - for noise in self.noises: - source, length = noise.noise_source( - source, length=length, **kwargs) - return source, length diff --git a/onmt/opts.py b/onmt/opts.py index f8d87b83f9..e07a0d38cf 100644 --- 
a/onmt/opts.py
+++ b/onmt/opts.py
@@ -2,17 +2,188 @@
 from __future__ import print_function

 import configargparse

-import onmt
 from onmt.models.sru import CheckSRU
+from onmt.transforms import AVAILABLE_TRANSFORMS


 def config_opts(parser):
-    parser.add('-config', '--config', required=False,
-               is_config_file_arg=True, help='config file path')
-    parser.add('-save_config', '--save_config', required=False,
-               is_write_out_config_file_arg=True,
-               help='config file save path')
+    group = parser.add_argument_group("Configuration")
+    group.add('-config', '--config', required=False,
+              is_config_file_arg=True,
+              help='Path of the main YAML config file.')
+    group.add('-save_config', '--save_config', required=False,
+              is_write_out_config_file_arg=True,
+              help='Path where to save the config.')
+
+
+def _add_logging_opts(parser, is_train=True):
+    group = parser.add_argument_group('Logging')
+    group.add('--log_file', '-log_file', type=str, default="",
+              help="Output logs to a file under this path.")
+    group.add('--log_file_level', '-log_file_level', type=str,
+              action=StoreLoggingLevelAction,
+              choices=StoreLoggingLevelAction.CHOICES,
+              default="0")
+
+    if is_train:
+        group.add('--report_every', '-report_every', type=int, default=50,
+                  help="Print stats at this interval.")
+        group.add('--exp_host', '-exp_host', type=str, default="",
+                  help="Send logs to this crayon server.")
+        group.add('--exp', '-exp', type=str, default="",
+                  help="Name of the experiment for logging.")
+        # Use Tensorboard for visualization during training
+        group.add('--tensorboard', '-tensorboard', action="store_true",
+                  help="Use tensorboard for visualization during training. "
+                       "Must have the library tensorboard >= 1.14.")
+        group.add("--tensorboard_log_dir", "-tensorboard_log_dir",
+                  type=str, default="runs/onmt",
+                  help="Log directory for Tensorboard. "
+                       "This is also the name of the run.")
+    else:
+        # Options only during inference
+        group.add('--verbose', '-verbose', action="store_true",
+                  help='Print scores and predictions for each sentence')
+        group.add('--attn_debug', '-attn_debug', action="store_true",
+                  help='Print best attn for each word')
+        group.add('--align_debug', '-align_debug', action="store_true",
+                  help='Print best align for each word')
+        group.add('--dump_beam', '-dump_beam', type=str, default="",
+                  help='File to dump beam information to.')
+        group.add('--n_best', '-n_best', type=int, default=1,
+                  help="If verbose is set, will output the n_best "
+                       "decoded sentences")
+
+
+def _add_reproducibility_opts(parser):
+    group = parser.add_argument_group('Reproducibility')
+    group.add('--seed', '-seed', type=int, default=-1,
+              help="Set random seed used for better "
+                   "reproducibility between experiments.")
+
+
+def _add_dynamic_corpus_opts(parser, build_vocab_only=False):
+    """Options related to the training corpus; type: a list of dictionaries."""
+    group = parser.add_argument_group('Data')
+    group.add("-data", "--data", required=True,
+              help="List of datasets and their specifications. "
+                   "See examples/*.yaml for further details.")
+    group.add("-skip_empty_level", "--skip_empty_level", default="warning",
+              choices=["silent", "warning", "error"],
+              help="Security level when encountering empty examples: "
                   "silent: silently ignore/skip empty examples; "
                   "warning: warn when ignoring/skipping an empty example; "
                   "error: raise an error and stop execution on empty input.")
+    group.add("-transforms", "--transforms", default=[], nargs="+",
+              choices=AVAILABLE_TRANSFORMS.keys(),
+              help="Default transform pipeline to apply to data. "
+                   "Can be specified in each corpus of data to override.")
+
+    group.add("-save_data", "--save_data", required=build_vocab_only,
+              help="Output base path for objects that will "
+                   "be saved (vocab, transforms, embeddings, ...).")
+    group.add("-overwrite", "--overwrite", action="store_true",
+              help="Overwrite existing objects if any.")
+    group.add(
+        '-n_sample', '--n_sample',
+        type=int, default=(5000 if build_vocab_only else 0),
+        help=("Build vocab using " if build_vocab_only else "Stop after save ")
+        + "this number of transformed samples/corpus. Can be [-1, 0, N>0]. "
+          "Set to -1 to go full corpus, 0 to skip.")
+
+    if not build_vocab_only:
+        group.add('-dump_fields', '--dump_fields', action='store_true',
+                  help="Dump fields `*.vocab.pt` to disk. "
+                       "-save_data should be set as the saving prefix.")
+        group.add('-dump_transforms', '--dump_transforms',
+                  action='store_true',
+                  help="Dump transforms `*.transforms.pt` to disk. "
+                       "-save_data should be set as the saving prefix.")
+
+
+def _add_dynamic_fields_opts(parser, build_vocab_only=False):
+    """Options related to vocabulary and fields.
+
+    Add all options related to vocabulary or fields to the parser.
+    If `build_vocab_only` is set to True, omit the field-related
+    options that won't be used in `bin/build_vocab.py`.
+    """
+    group = parser.add_argument_group("Vocab")
+    group.add("-src_vocab", "--src_vocab",
+              required=not(build_vocab_only),
+              help="Path to a vocabulary file for src. "
+                   "Format: one <word> or <word>\t<count> per line.")
+    group.add("-tgt_vocab", "--tgt_vocab",
+              help="Path to a vocabulary file for tgt. "
+                   "Format: one <word> or <word>\t<count> per line.")
+    group.add("-share_vocab", "--share_vocab", action="store_true",
+              help="Share source and target vocabulary.")
+
+    if not build_vocab_only:
+        group.add("-src_vocab_size", "--src_vocab_size",
+                  type=int, default=50000,
+                  help="Maximum size of the source vocabulary.")
+        group.add("-tgt_vocab_size", "--tgt_vocab_size",
+                  type=int, default=50000,
+                  help="Maximum size of the target vocabulary.")
+        group.add("-vocab_size_multiple", "--vocab_size_multiple",
+                  type=int, default=1,
+                  help="Make the vocabulary size a multiple of this value.")
+
+        group.add("-src_words_min_frequency", "--src_words_min_frequency",
+                  type=int, default=0,
+                  help="Discard source words with lower frequency.")
+        group.add("-tgt_words_min_frequency", "--tgt_words_min_frequency",
+                  type=int, default=0,
+                  help="Discard target words with lower frequency.")
+
+    # Truncation options, for text corpus
+    group = parser.add_argument_group("Pruning")
+    group.add("--src_seq_length_trunc", "-src_seq_length_trunc",
+              type=int, default=None,
+              help="Truncate source sequence length.")
+    group.add("--tgt_seq_length_trunc", "-tgt_seq_length_trunc",
+              type=int, default=None,
+              help="Truncate target sequence length.")
+
+    group = parser.add_argument_group('Embeddings')
+    group.add('-both_embeddings', '--both_embeddings',
+              help="Path to the embeddings file to use "
+                   "for both source and target tokens.")
+    group.add('-src_embeddings', '--src_embeddings',
+              help="Path to the embeddings file to use for source tokens.")
+    group.add('-tgt_embeddings', '--tgt_embeddings',
+              help="Path to the embeddings file to use for target tokens.")
+    group.add('-embeddings_type', '--embeddings_type',
+              choices=["GloVe", "word2vec"],
+              help="Type of embeddings file.")
+
+
+def _add_dynamic_transform_opts(parser):
+    """Options related to transforms.
+
+    Options that are specified in the definitions of each transform class
+    at `onmt/transforms/*.py`.
+ """ + for name, transform_cls in AVAILABLE_TRANSFORMS.items(): + transform_cls.add_options(parser) + + +def dynamic_prepare_opts(parser, build_vocab_only=False): + """Options related to data prepare in dynamic mode. + + Add all dynamic data prepare related options to parser. + If `build_vocab_only` set to True, then only contains options that + will be used in `onmt/bin/build_vocab.py`. + """ + config_opts(parser) + _add_dynamic_corpus_opts(parser, build_vocab_only=build_vocab_only) + _add_dynamic_fields_opts(parser, build_vocab_only=build_vocab_only) + _add_dynamic_transform_opts(parser) + + if build_vocab_only: + _add_reproducibility_opts(parser) + # as for False, this will be added in _add_train_general_opts def model_opts(parser): @@ -62,10 +233,10 @@ def model_opts(parser): # Encoder-Decoder Options group = parser.add_argument_group('Model- Encoder-Decoder') group.add('--model_type', '-model_type', default='text', - choices=['text', 'img', 'audio', 'vec'], + choices=['text'], help="Type of source model to use. Allows " "the system to incorporate non-text inputs. " - "Options are [text|img|audio|vec].") + "Options are [text].") group.add('--model_dtype', '-model_dtype', default='fp32', choices=['fp32', 'fp16'], help='Data type of the model.') @@ -91,19 +262,9 @@ def model_opts(parser): help="Size of rnn hidden states. Overwrites " "enc_rnn_size and dec_rnn_size") group.add('--enc_rnn_size', '-enc_rnn_size', type=int, default=500, - help="Size of encoder rnn hidden states. " - "Must be equal to dec_rnn_size except for " - "speech-to-text.") + help="Size of encoder rnn hidden states.") group.add('--dec_rnn_size', '-dec_rnn_size', type=int, default=500, - help="Size of decoder rnn hidden states. " - "Must be equal to enc_rnn_size except for " - "speech-to-text.") - group.add('--audio_enc_pooling', '-audio_enc_pooling', - type=str, default='1', - help="The amount of pooling of audio encoder, " - "either the same amount of pooling across all layers " - "indicated by a single number, or different amounts of " - "pooling per layer separated by comma.") + help="Size of decoder rnn hidden states.") group.add('--cnn_kernel_width', '-cnn_kernel_width', type=int, default=3, help="Size of windows in the cnn, the kernel_size is " "(cnn_kernel_width, 1) in conv layer") @@ -145,11 +306,6 @@ def model_opts(parser): help='Number of nodes in the graph encoder') group.add('--n_steps', '-n_steps', type=int, default=2, help='Number of steps to advance graph encoder') - # The ggnn uses src_vocab during training because the graph is built - # using edge information which requires parsing the input sequence. - group.add('--src_vocab', '-src_vocab', default="", - help="Path to an existing source vocabulary. Format: " - "one word per line.") # Attention options group = parser.add_argument_group('Model- Attention') @@ -225,162 +381,12 @@ def model_opts(parser): "See https://nvidia.github.io/apex/amp.html#opt-levels.") -def preprocess_opts(parser): - """ Pre-procesing options """ - # Data options - group = parser.add_argument_group('Data') +def _add_train_general_opts(parser): + """ General options for training """ + group = parser.add_argument_group('General') group.add('--data_type', '-data_type', default="text", help="Type of the source input. 
" - "Options are [text|img|audio|vec].") - - group.add('--train_src', '-train_src', required=True, nargs='+', - help="Path(s) to the training source data") - group.add('--train_tgt', '-train_tgt', required=True, nargs='+', - help="Path(s) to the training target data") - group.add('--train_align', '-train_align', nargs='+', default=[None], - help="Path(s) to the training src-tgt alignment") - group.add('--train_ids', '-train_ids', nargs='+', default=[None], - help="ids to name training shards, used for corpus weighting") - - group.add('--valid_src', '-valid_src', - help="Path to the validation source data") - group.add('--valid_tgt', '-valid_tgt', - help="Path to the validation target data") - group.add('--valid_align', '-valid_align', default=None, - help="Path(s) to the validation src-tgt alignment") - - group.add('--src_dir', '-src_dir', default="", - help="Source directory for image or audio files.") - - group.add('--save_data', '-save_data', required=True, - help="Output file for the prepared data") - - group.add('--max_shard_size', '-max_shard_size', type=int, default=0, - help="""Deprecated use shard_size instead""") - - group.add('--shard_size', '-shard_size', type=int, default=1000000, - help="Divide src_corpus and tgt_corpus into " - "smaller multiple src_copus and tgt corpus files, then " - "build shards, each shard will have " - "opt.shard_size samples except last shard. " - "shard_size=0 means no segmentation " - "shard_size>0 means segment dataset into multiple shards, " - "each shard has shard_size samples") - - group.add('--num_threads', '-num_threads', type=int, default=1, - help="Number of shards to build in parallel.") - - group.add('--overwrite', '-overwrite', action="store_true", - help="Overwrite existing shards if any.") - - # Dictionary options, for text corpus - - group = parser.add_argument_group('Vocab') - # if you want to pass an existing vocab.pt file, pass it to - # -src_vocab alone as it already contains tgt vocab. - group.add('--src_vocab', '-src_vocab', default="", - help="Path to an existing source vocabulary. Format: " - "one word per line.") - group.add('--tgt_vocab', '-tgt_vocab', default="", - help="Path to an existing target vocabulary. 
Format: " - "one word per line.") - group.add('--features_vocabs_prefix', '-features_vocabs_prefix', - type=str, default='', - help="Path prefix to existing features vocabularies") - group.add('--src_vocab_size', '-src_vocab_size', type=int, default=50000, - help="Size of the source vocabulary") - group.add('--tgt_vocab_size', '-tgt_vocab_size', type=int, default=50000, - help="Size of the target vocabulary") - group.add('--vocab_size_multiple', '-vocab_size_multiple', - type=int, default=1, - help="Make the vocabulary size a multiple of this value") - - group.add('--src_words_min_frequency', - '-src_words_min_frequency', type=int, default=0) - group.add('--tgt_words_min_frequency', - '-tgt_words_min_frequency', type=int, default=0) - - group.add('--dynamic_dict', '-dynamic_dict', action='store_true', - help="Create dynamic dictionaries") - group.add('--share_vocab', '-share_vocab', action='store_true', - help="Share source and target vocabulary") - - # Truncation options, for text corpus - group = parser.add_argument_group('Pruning') - group.add('--src_seq_length', '-src_seq_length', type=int, default=50, - help="Maximum source sequence length") - group.add('--src_seq_length_trunc', '-src_seq_length_trunc', - type=int, default=None, - help="Truncate source sequence length.") - group.add('--tgt_seq_length', '-tgt_seq_length', type=int, default=50, - help="Maximum target sequence length to keep.") - group.add('--tgt_seq_length_trunc', '-tgt_seq_length_trunc', - type=int, default=None, - help="Truncate target sequence length.") - group.add('--lower', '-lower', action='store_true', help='lowercase data') - group.add('--filter_valid', '-filter_valid', action='store_true', - help='Filter validation data by src and/or tgt length') - - # Data processing options - group = parser.add_argument_group('Random') - group.add('--shuffle', '-shuffle', type=int, default=0, - help="Shuffle data") - group.add('--seed', '-seed', type=int, default=3435, - help="Random seed") - - group = parser.add_argument_group('Logging') - group.add('--report_every', '-report_every', type=int, default=100000, - help="Report status every this many sentences") - group.add('--log_file', '-log_file', type=str, default="", - help="Output logs to a file under this path.") - group.add('--log_file_level', '-log_file_level', type=str, - action=StoreLoggingLevelAction, - choices=StoreLoggingLevelAction.CHOICES, - default="0") - - # Options most relevant to speech - group = parser.add_argument_group('Speech') - group.add('--sample_rate', '-sample_rate', type=int, default=16000, - help="Sample rate.") - group.add('--window_size', '-window_size', type=float, default=.02, - help="Window size for spectrogram in seconds.") - group.add('--window_stride', '-window_stride', type=float, default=.01, - help="Window stride for spectrogram in seconds.") - group.add('--window', '-window', default='hamming', - help="Window type for spectrogram generation.") - - # Option most relevant to image input - group.add('--image_channel_size', '-image_channel_size', - type=int, default=3, - choices=[3, 1], - help="Using grayscale image can training " - "model faster and smaller") - - # Options for experimental source noising (BART style) - group = parser.add_argument_group('Noise') - group.add('--subword_prefix', '-subword_prefix', - type=str, default="▁", - help="subword prefix to build wordstart mask") - group.add('--subword_prefix_is_joiner', '-subword_prefix_is_joiner', - action='store_true', - help="mask will need to be inverted if prefix is joiner") - - 
-def train_opts(parser): - """ Training and saving options """ - - group = parser.add_argument_group('General') - group.add('--data', '-data', required=True, - help='Path prefix to the ".train.pt" and ' - '".valid.pt" file path from preprocess.py') - - group.add('--data_ids', '-data_ids', nargs='+', default=[None], - help="In case there are several corpora.") - group.add('--data_weights', '-data_weights', type=int, nargs='+', - default=[1], help="""Weights of different corpora, - should follow the same order as in -data_ids.""") - group.add('--data_to_noise', '-data_to_noise', nargs='+', default=[], - help="IDs of datasets on which to apply noise.") + "Options are [text].") group.add('--save_model', '-save_model', default='model', help="Model filename (the model will be saved as " @@ -412,9 +418,7 @@ def train_opts(parser): group.add('--queue_size', '-queue_size', default=40, type=int, help="Size of queue for each process in producer/consumer") - group.add('--seed', '-seed', type=int, default=-1, - help="Random seed used for the experiments " - "reproducibility.") + _add_reproducibility_opts(parser) # Init options group = parser.add_argument_group('Initialization') @@ -557,12 +561,6 @@ def train_opts(parser): help="Step for moving average. " "Default is every update, " "if -average_decay is set.") - group.add("--src_noise", "-src_noise", type=str, nargs='+', - default=[], - choices=onmt.modules.source_noise.MultiNoise.NOISES.keys()) - group.add("--src_noise_prob", "-src_noise_prob", type=float, nargs='+', - default=[], - help="Probabilities of src_noise functions") # learning rate group = parser.add_argument_group('Optimization- Rate') @@ -587,41 +585,91 @@ def train_opts(parser): help="Use a custom decay rate.") group.add('--warmup_steps', '-warmup_steps', type=int, default=4000, help="Number of warmup steps for custom decay.") + _add_logging_opts(parser, is_train=True) - group = parser.add_argument_group('Logging') - group.add('--report_every', '-report_every', type=int, default=50, - help="Print stats at this interval.") - group.add('--log_file', '-log_file', type=str, default="", - help="Output logs to a file under this path.") - group.add('--log_file_level', '-log_file_level', type=str, - action=StoreLoggingLevelAction, - choices=StoreLoggingLevelAction.CHOICES, - default="0") - group.add('--exp_host', '-exp_host', type=str, default="", - help="Send logs to this crayon server.") - group.add('--exp', '-exp', type=str, default="", - help="Name of the experiment for logging.") - # Use Tensorboard for visualization during training - group.add('--tensorboard', '-tensorboard', action="store_true", - help="Use tensorboard for visualization during training. " - "Must have the library tensorboard >= 1.14.") - group.add("--tensorboard_log_dir", "-tensorboard_log_dir", - type=str, default="runs/onmt", - help="Log directory for Tensorboard. 
" - "This is also the name of the run.") - - group = parser.add_argument_group('Speech') - # Options most relevant to speech - group.add('--sample_rate', '-sample_rate', type=int, default=16000, - help="Sample rate.") - group.add('--window_size', '-window_size', type=float, default=.02, - help="Window size for spectrogram in seconds.") - - # Option most relevant to image input - group.add('--image_channel_size', '-image_channel_size', - type=int, default=3, choices=[3, 1], - help="Using grayscale image can training " - "model faster and smaller") + +def _add_train_dynamic_data(parser): + group = parser.add_argument_group("Dynamic data") + group.add("-bucket_size", "--bucket_size", type=int, default=2048, + help="Examples per dynamically generated torchtext Dataset.") + + +def train_opts(parser): + """All options used in train.""" + # options relate to data preprare + dynamic_prepare_opts(parser, build_vocab_only=False) + # options relate to train + model_opts(parser) + _add_train_general_opts(parser) + _add_train_dynamic_data(parser) + + +def _add_decoding_opts(parser): + group = parser.add_argument_group('Decoding tricks') + group.add('--block_ngram_repeat', '-block_ngram_repeat', + type=int, default=0, + help='Block repetition of ngrams during decoding.') + group.add('--ignore_when_blocking', '-ignore_when_blocking', + nargs='+', type=str, default=[], + help="Ignore these strings when blocking repeats. " + "You want to block sentence delimiters.") + group.add('--replace_unk', '-replace_unk', action="store_true", + help="Replace the generated UNK tokens with the " + "source token that had highest attention weight. If " + "phrase_table is provided, it will look up the " + "identified source token and give the corresponding " + "target token. If it is not provided (or the identified " + "source token does not exist in the table), then it " + "will copy the source token.") + group.add('--phrase_table', '-phrase_table', type=str, default="", + help="If phrase_table is provided (with replace_unk), it will " + "look up the identified source token and give the " + "corresponding target token. If it is not provided " + "(or the identified source token does not exist in " + "the table), then it will copy the source token.") + + group = parser.add_argument_group('Random Sampling') + group.add('--random_sampling_topk', '-random_sampling_topk', + default=1, type=int, + help="Set this to -1 to do random sampling from full " + "distribution. Set this to value k>1 to do random " + "sampling restricted to the k most likely next tokens. 
" + "Set this to 1 to use argmax or for doing beam " + "search.") + group.add('--random_sampling_temp', '-random_sampling_temp', + default=1., type=float, + help="If doing random sampling, divide the logits by " + "this before computing softmax during decoding.") + _add_reproducibility_opts(parser) + + group = parser.add_argument_group('Beam Search') + group.add('--beam_size', '-beam_size', type=int, default=5, + help='Beam size') + group.add('--min_length', '-min_length', type=int, default=0, + help='Minimum prediction length') + group.add('--max_length', '-max_length', type=int, default=100, + help='Maximum prediction length.') + group.add('--max_sent_length', '-max_sent_length', action=DeprecateAction, + help="Deprecated, use `-max_length` instead") + + # Alpha and Beta values for Google Length + Coverage penalty + # Described here: https://arxiv.org/pdf/1609.08144.pdf, Section 7 + group.add('--stepwise_penalty', '-stepwise_penalty', action='store_true', + help="Apply penalty at every decoding step. " + "Helpful for summary penalty.") + group.add('--length_penalty', '-length_penalty', default='none', + choices=['none', 'wu', 'avg'], + help="Length Penalty to use.") + group.add('--ratio', '-ratio', type=float, default=-0., + help="Ratio based beam stop condition") + group.add('--coverage_penalty', '-coverage_penalty', default='none', + choices=['none', 'wu', 'summary'], + help="Coverage Penalty to use.") + group.add('--alpha', '-alpha', type=float, default=0., + help="Google NMT length penalty parameter " + "(higher = longer generation)") + group.add('--beta', '-beta', type=float, default=-0., + help="Coverage penalty parameter") def translate_opts(parser): @@ -645,13 +693,11 @@ def translate_opts(parser): group = parser.add_argument_group('Data') group.add('--data_type', '-data_type', default="text", - help="Type of the source input. Options: [text|img].") + help="Type of the source input. Options: [text].") group.add('--src', '-src', required=True, help="Source sequence to decode (one line per " "sequence)") - group.add('--src_dir', '-src_dir', default="", - help='Source directory for image or audio files') group.add('--tgt', '-tgt', help='True target sequence (optional)') group.add('--tgt_prefix', '-tgt_prefix', action='store_true', @@ -672,94 +718,11 @@ def translate_opts(parser): group.add('--report_time', '-report_time', action='store_true', help="Report some translation time metrics") - # Options most relevant to summarization. - group.add('--dynamic_dict', '-dynamic_dict', action='store_true', - help="Create dynamic dictionaries") - group.add('--share_vocab', '-share_vocab', action='store_true', - help="Share source and target vocabulary") - - group = parser.add_argument_group('Random Sampling') - group.add('--random_sampling_topk', '-random_sampling_topk', - default=1, type=int, - help="Set this to -1 to do random sampling from full " - "distribution. Set this to value k>1 to do random " - "sampling restricted to the k most likely next tokens. 
" - "Set this to 1 to use argmax or for doing beam " - "search.") - group.add('--random_sampling_temp', '-random_sampling_temp', - default=1., type=float, - help="If doing random sampling, divide the logits by " - "this before computing softmax during decoding.") - group.add('--seed', '-seed', type=int, default=829, - help="Random seed") + # Adding options relate to decoding strategy + _add_decoding_opts(parser) - group = parser.add_argument_group('Beam') - group.add('--beam_size', '-beam_size', type=int, default=5, - help='Beam size') - group.add('--min_length', '-min_length', type=int, default=0, - help='Minimum prediction length') - group.add('--max_length', '-max_length', type=int, default=100, - help='Maximum prediction length.') - group.add('--max_sent_length', '-max_sent_length', action=DeprecateAction, - help="Deprecated, use `-max_length` instead") - - # Alpha and Beta values for Google Length + Coverage penalty - # Described here: https://arxiv.org/pdf/1609.08144.pdf, Section 7 - group.add('--stepwise_penalty', '-stepwise_penalty', action='store_true', - help="Apply penalty at every decoding step. " - "Helpful for summary penalty.") - group.add('--length_penalty', '-length_penalty', default='none', - choices=['none', 'wu', 'avg'], - help="Length Penalty to use.") - group.add('--ratio', '-ratio', type=float, default=-0., - help="Ratio based beam stop condition") - group.add('--coverage_penalty', '-coverage_penalty', default='none', - choices=['none', 'wu', 'summary'], - help="Coverage Penalty to use.") - group.add('--alpha', '-alpha', type=float, default=0., - help="Google NMT length penalty parameter " - "(higher = longer generation)") - group.add('--beta', '-beta', type=float, default=-0., - help="Coverage penalty parameter") - group.add('--block_ngram_repeat', '-block_ngram_repeat', - type=int, default=0, - help='Block repetition of ngrams during decoding.') - group.add('--ignore_when_blocking', '-ignore_when_blocking', - nargs='+', type=str, default=[], - help="Ignore these strings when blocking repeats. " - "You want to block sentence delimiters.") - group.add('--replace_unk', '-replace_unk', action="store_true", - help="Replace the generated UNK tokens with the " - "source token that had highest attention weight. If " - "phrase_table is provided, it will look up the " - "identified source token and give the corresponding " - "target token. If it is not provided (or the identified " - "source token does not exist in the table), then it " - "will copy the source token.") - group.add('--phrase_table', '-phrase_table', type=str, default="", - help="If phrase_table is provided (with replace_unk), it will " - "look up the identified source token and give the " - "corresponding target token. 
If it is not provided " - "(or the identified source token does not exist in " - "the table), then it will copy the source token.") - group = parser.add_argument_group('Logging') - group.add('--verbose', '-verbose', action="store_true", - help='Print scores and predictions for each sentence') - group.add('--log_file', '-log_file', type=str, default="", - help="Output logs to a file under this path.") - group.add('--log_file_level', '-log_file_level', type=str, - action=StoreLoggingLevelAction, - choices=StoreLoggingLevelAction.CHOICES, - default="0") - group.add('--attn_debug', '-attn_debug', action="store_true", - help='Print best attn for each word') - group.add('--align_debug', '-align_debug', action="store_true", - help='Print best align for each word') - group.add('--dump_beam', '-dump_beam', type=str, default="", - help='File to dump beam information to.') - group.add('--n_best', '-n_best', type=int, default=1, - help="If verbose is set, will output the n_best " - "decoded sentences") + # Adding option for logging + _add_logging_opts(parser, is_train=False) group = parser.add_argument_group('Efficiency') group.add('--batch_size', '-batch_size', type=int, default=30, @@ -771,23 +734,6 @@ def translate_opts(parser): group.add('--gpu', '-gpu', type=int, default=-1, help="Device to run on") - # Options most relevant to speech. - group = parser.add_argument_group('Speech') - group.add('--sample_rate', '-sample_rate', type=int, default=16000, - help="Sample rate.") - group.add('--window_size', '-window_size', type=float, default=.02, - help='Window size for spectrogram in seconds') - group.add('--window_stride', '-window_stride', type=float, default=.01, - help='Window stride for spectrogram in seconds') - group.add('--window', '-window', default='hamming', - help='Window type for spectrogram generation') - - # Option most relevant to image input - group.add('--image_channel_size', '-image_channel_size', - type=int, default=3, choices=[3, 1], - help="Using grayscale image can training " - "model faster and smaller") - # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be diff --git a/onmt/tests/pull_request_chk.sh b/onmt/tests/pull_request_chk.sh index fb89fe5417..178c1622fd 100755 --- a/onmt/tests/pull_request_chk.sh +++ b/onmt/tests/pull_request_chk.sh @@ -11,6 +11,7 @@ PROJECT_ROOT=`dirname "$0"`"/../../" DATA_DIR="$PROJECT_ROOT/data" TEST_DIR="$PROJECT_ROOT/onmt/tests" PYTHON="python3" +TMP_OUT_DIR="/tmp/onmt_prchk" clean_up() { @@ -19,17 +20,13 @@ clean_up() fi if [[ "${SKIP_FULL_CLEAN}" == "1" ]]; then # delete any .pt's that weren't downloaded - ls /tmp/*.pt | grep -vE "test_model_speech.pt|test_model_im2text.pt" | xargs -I {} rm -f /tmp/{} + ls $TMP_OUT_DIR/*.pt | xargs -I {} rm -f $TMP_OUT_DIR/{} else # delete all .pt's - rm -f /tmp/*.pt + rm -f $TMP_OUT_DIR/*.pt + rm -rf $TMP_OUT_DIR/sample + rm -d $TMP_OUT_DIR fi - if [[ "${SKIP_FULL_CLEAN}" != "1" ]]; then - rm -rf /tmp/im2text - rm -rf /tm/speech - fi - rm -f /tmp/im2text.tgz - rm -f /tmp/speech.tgz } trap clean_up SIGINT SIGQUIT SIGKILL @@ -41,39 +38,10 @@ error_exit() exit 1 } -environment_prepare() -{ - # Download img2text corpus - if [[ "${SKIP_DOWNLOADS}" != "1" || ! -d /tmp/im2text ]]; then - if [[ "${SKIP_DOWNLOADS}" != "1" || ! 
-f /tmp/im2text.tgz ]]; then - wget -q -O /tmp/im2text.tgz http://lstm.seas.harvard.edu/latex/im2text_small.tgz - fi - tar zxf /tmp/im2text.tgz -C /tmp/ - fi - head /tmp/im2text/src-train.txt > /tmp/im2text/src-train-head.txt - head /tmp/im2text/tgt-train.txt > /tmp/im2text/tgt-train-head.txt - head /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt - head /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt - - if [[ "${SKIP_DOWNLOADS}" != "1" || ! -f /tmp/test_model_speech.pt ]]; then - wget -q -O /tmp/test_model_speech.pt http://lstm.seas.harvard.edu/latex/model_step_2760.pt - fi - # Download speech2text corpus - if [[ "${SKIP_DOWNLOADS}" != "1" || ! -d /tmp/speech ]]; then - if [[ "${SKIP_DOWNLOADS}" != "1" || ! -f /tmp/speech.tgz ]]; then - wget -q -O /tmp/speech.tgz http://lstm.seas.harvard.edu/latex/speech.tgz - fi - tar zxf /tmp/speech.tgz -C /tmp/ - fi - head /tmp/speech/src-train.txt > /tmp/speech/src-train-head.txt - head /tmp/speech/tgt-train.txt > /tmp/speech/tgt-train-head.txt - head /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt - head /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt +# environment_prepare() +# { - if [[ "${SKIP_DOWNLOADS}" != "1" || ! -f /tmp/test_model_im2text.pt ]]; then - wget -q -O /tmp/test_model_im2text.pt http://lstm.seas.harvard.edu/latex/test_model_im2text.pt - fi -} +# } # flake8 check echo -n "[+] Doing flake8 check..." @@ -82,13 +50,6 @@ ${PYTHON} -m flake8 >> ${LOG_FILE} 2>&1 echo "Succeeded" | tee -a ${LOG_FILE} -# Environment prepartion -echo -n "[+] Preparing for test..." -environment_prepare -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - - # unittest echo -n "[+] Doing unittest test..." ${PYTHON} -m unittest discover >> ${LOG_FILE} 2>&1 @@ -97,246 +58,170 @@ echo "Succeeded" | tee -a ${LOG_FILE} # -# Preprocess test +# Get Vocabulary test # -echo "[+] Doing preprocess test..." - -echo -n " [+] Testing NMT preprocessing..." -rm -rf /tmp/data*pt -${PYTHON} preprocess.py -train_src ${DATA_DIR}/src-train.txt \ - -train_tgt ${DATA_DIR}/tgt-train.txt \ - -valid_src ${DATA_DIR}/src-val.txt \ - -valid_tgt ${DATA_DIR}/tgt-val.txt \ - -save_data /tmp/data \ - -src_vocab_size 1000 \ - -tgt_vocab_size 1000 >> ${LOG_FILE} 2>&1 +echo -n "[+] Testing vocabulary building..." +PYTHONPATH=${PROJECT_ROOT}:${PYTHONPATH} ${PYTHON} onmt/bin/build_vocab.py \ + -config ${DATA_DIR}/data.yaml \ + -save_data $TMP_OUT_DIR/onmt \ + -n_sample 5000 >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm -r $TMP_OUT_DIR/sample -echo -n " [+] Testing img2text preprocessing..." -rm -rf /tmp/im2text/data*pt -${PYTHON} preprocess.py -data_type img \ - -src_dir /tmp/im2text/images \ - -train_src /tmp/im2text/src-train.txt \ - -train_tgt /tmp/im2text/tgt-train.txt \ - -valid_src /tmp/im2text/src-val.txt \ - -valid_tgt /tmp/im2text/tgt-val.txt \ - -save_data /tmp/im2text/data >> ${LOG_FILE} 2>&1 -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - -echo -n " [+] Testing speech2text preprocessing..." -rm -rf /tmp/speech/data*pt -${PYTHON} preprocess.py -data_type audio \ - -src_dir /tmp/speech/an4_dataset \ - -train_src /tmp/speech/src-train.txt \ - -train_tgt /tmp/speech/tgt-train.txt \ - -valid_src /tmp/speech/src-val.txt \ - -valid_tgt /tmp/speech/tgt-val.txt \ - -save_data /tmp/speech/data >> ${LOG_FILE} 2>&1 +# +# Training test +# +echo -n "[+] Testing NMT fields/transforms prepare..." 
+${PYTHON} onmt/bin/train.py \ + -config ${DATA_DIR}/data.yaml \ + -save_data $TMP_OUT_DIR/onmt.train.check \ + -dump_fields -dump_transforms -n_sample 30 \ + -src_vocab $TMP_OUT_DIR/onmt.vocab.src \ + -tgt_vocab $TMP_OUT_DIR/onmt.vocab.tgt \ + -src_vocab_size 1000 \ + -tgt_vocab_size 1000 >> ${LOG_FILE} 2>&1 +[ "$?" -eq 0 ] || error_exit +echo "Succeeded" | tee -a ${LOG_FILE} +# rm $TMP_OUT_DIR/onmt.train.check* # used in tool testing + +echo "[+] Doing Training test..." + +echo -n " [+] Testing NMT training..." +${PYTHON} onmt/bin/train.py \ + -config ${DATA_DIR}/data.yaml \ + -src_vocab $TMP_OUT_DIR/onmt.vocab.src \ + -tgt_vocab $TMP_OUT_DIR/onmt.vocab.tgt \ + -src_vocab_size 1000 \ + -tgt_vocab_size 1000 \ + -rnn_size 2 -batch_size 10 \ + -word_vec_size 5 -report_every 5 \ + -rnn_size 10 -train_steps 10 >> ${LOG_FILE} 2>&1 +[ "$?" -eq 0 ] || error_exit +echo "Succeeded" | tee -a ${LOG_FILE} + +echo -n " [+] Testing NMT training w/ copy..." +${PYTHON} onmt/bin/train.py \ + -config ${DATA_DIR}/data.yaml \ + -src_vocab $TMP_OUT_DIR/onmt.vocab.src \ + -tgt_vocab $TMP_OUT_DIR/onmt.vocab.tgt \ + -src_vocab_size 1000 \ + -tgt_vocab_size 1000 \ + -rnn_size 2 -batch_size 10 \ + -word_vec_size 5 -report_every 5 \ + -rnn_size 10 -train_steps 10 \ + -copy_attn >> ${LOG_FILE} 2>&1 +[ "$?" -eq 0 ] || error_exit +echo "Succeeded" | tee -a ${LOG_FILE} + +echo -n " [+] Testing NMT training w/ align..." +${PYTHON} onmt/bin/train.py \ + -config ${DATA_DIR}/align_data.yaml \ + -src_vocab $TMP_OUT_DIR/onmt.vocab.src \ + -tgt_vocab $TMP_OUT_DIR/onmt.vocab.tgt \ + -src_vocab_size 1000 \ + -tgt_vocab_size 1000 \ + -max_generator_batches 0 \ + -encoder_type transformer -decoder_type transformer \ + -layers 4 -word_vec_size 16 -rnn_size 16 -heads 2 -transformer_ff 64 \ + -lambda_align 0.05 -alignment_layer 2 -alignment_heads 0 \ + -report_every 5 -train_steps 10 >> ${LOG_FILE} 2>&1 +[ "$?" -eq 0 ] || error_exit +echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/onmt.vocab* + +echo -n " [+] Testing Graph Neural Network training..." +${PYTHON} onmt/bin/train.py \ + -config ${DATA_DIR}/ggnn_data.yaml \ + -src_seq_length 1000 -tgt_seq_length 30 \ + -encoder_type ggnn -layers 2 \ + -decoder_type rnn -rnn_size 256 \ + -learning_rate 0.1 -learning_rate_decay 0.8 \ + -global_attention general -batch_size 32 -word_vec_size 256 \ + -bridge -train_steps 10 -n_edge_types 9 -state_dim 256 \ + -n_steps 10 -n_node 64 >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} - # # Translation test # echo "[+] Doing translation test..." echo -n " [+] Testing NMT translation..." -head ${DATA_DIR}/src-test.txt > /tmp/src-test.txt -${PYTHON} translate.py -model ${TEST_DIR}/test_model.pt -src /tmp/src-test.txt -verbose >> ${LOG_FILE} 2>&1 +head ${DATA_DIR}/src-test.txt > $TMP_OUT_DIR/src-test.txt +${PYTHON} translate.py -model ${TEST_DIR}/test_model.pt -src $TMP_OUT_DIR/src-test.txt -verbose >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/src-test.txt echo -n " [+] Testing NMT ensemble translation..." -head ${DATA_DIR}/src-test.txt > /tmp/src-test.txt +head ${DATA_DIR}/src-test.txt > $TMP_OUT_DIR/src-test.txt ${PYTHON} translate.py -model ${TEST_DIR}/test_model.pt ${TEST_DIR}/test_model.pt \ - -src /tmp/src-test.txt -verbose >> ${LOG_FILE} 2>&1 -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - -echo -n " [+] Testing img2text translation..." 
-head /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt -head /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt -${PYTHON} translate.py -data_type img \ - -src_dir /tmp/im2text/images \ - -model /tmp/test_model_im2text.pt \ - -src /tmp/im2text/src-val-head.txt \ - -tgt /tmp/im2text/tgt-val-head.txt \ - -verbose -out /tmp/im2text/trans >> ${LOG_FILE} 2>&1 -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - -echo -n " [+] Testing speech2text translation..." -head /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt -head /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt -${PYTHON} translate.py -data_type audio \ - -src_dir /tmp/speech/an4_dataset \ - -model /tmp/test_model_speech.pt \ - -src /tmp/speech/src-val-head.txt \ - -tgt /tmp/speech/tgt-val-head.txt \ - -verbose -out /tmp/speech/trans >> ${LOG_FILE} 2>&1 -diff /tmp/speech/tgt-val-head.txt /tmp/speech/trans + -src $TMP_OUT_DIR/src-test.txt -verbose >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/src-test.txt - -# NMT Preprocess + Train + Translation test -echo -n "[+] Doing NMT {preprocess + train + translation} test..." -head ${DATA_DIR}/src-val.txt > /tmp/src-val.txt -head ${DATA_DIR}/tgt-val.txt > /tmp/tgt-val.txt -rm -rf /tmp/q*pt -${PYTHON} preprocess.py -train_src /tmp/src-val.txt \ - -train_tgt /tmp/tgt-val.txt \ - -valid_src /tmp/src-val.txt \ - -valid_tgt /tmp/tgt-val.txt \ - -save_data /tmp/q \ - -src_vocab_size 1000 \ - -tgt_vocab_size 1000 >> ${LOG_FILE} 2>&1 -${PYTHON} train.py -data /tmp/q -rnn_size 2 -batch_size 10 \ - -word_vec_size 5 -report_every 5 \ - -rnn_size 10 -train_steps 10 >> ${LOG_FILE} 2>&1 -${PYTHON} translate.py -model ${TEST_DIR}/test_model2.pt \ - -src ${DATA_DIR}/morph/src.valid \ - -verbose -batch_size 10 \ - -beam_size 10 \ - -tgt ${DATA_DIR}/morph/tgt.valid \ - -out /tmp/trans >> ${LOG_FILE} 2>&1 -diff ${DATA_DIR}/morph/tgt.valid /tmp/trans -[ "$?" -eq 0 ] || error_exit - +echo -n " [+] Testing NMT translation w/ Beam search..." ${PYTHON} translate.py -model ${TEST_DIR}/test_model2.pt \ - -src ${DATA_DIR}/morph/src.valid \ - -verbose -batch_size 10 \ - -beam_size 1 \ - -seed 1 \ - -random_sampling_topk=-1 \ - -random_sampling_temp=0.0001 \ - -tgt ${DATA_DIR}/morph/tgt.valid \ - -out /tmp/trans >> ${LOG_FILE} 2>&1 -diff ${DATA_DIR}/morph/tgt.valid /tmp/trans -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - - -# NMT Preprocess w/sharding + Train w/copy -echo -n "[+] Doing NMT {preprocess w/sharding + train w/copy} test..." -head ${DATA_DIR}/src-val.txt > /tmp/src-val.txt -head ${DATA_DIR}/tgt-val.txt > /tmp/tgt-val.txt -rm -rf /tmp/q*pt -${PYTHON} preprocess.py -train_src /tmp/src-val.txt \ - -train_tgt /tmp/tgt-val.txt \ - -valid_src /tmp/src-val.txt \ - -valid_tgt /tmp/tgt-val.txt \ - -save_data /tmp/q \ - -src_vocab_size 1000 \ - -tgt_vocab_size 1000 \ - -shard_size 1 \ - -dynamic_dict >> ${LOG_FILE} 2>&1 -${PYTHON} train.py -data /tmp/q -rnn_size 2 -batch_size 10 \ - -word_vec_size 5 -report_every 5 \ - -rnn_size 10 -train_steps 10 -copy_attn >> ${LOG_FILE} 2>&1 -[ "$?" -eq 0 ] || error_exit -echo "Succeeded" | tee -a ${LOG_FILE} - - -echo -n "[+] Doing im2text {preprocess w/sharding + train} test..." 
-head /tmp/im2text/src-val.txt > /tmp/im2text/src-val-head.txt -head /tmp/im2text/tgt-val.txt > /tmp/im2text/tgt-val-head.txt -rm -rf /tmp/im2text/q*pt -${PYTHON} preprocess.py -data_type img \ - -src_dir /tmp/im2text/images \ - -train_src /tmp/im2text/src-val-head.txt \ - -train_tgt /tmp/im2text/tgt-val-head.txt \ - -valid_src /tmp/im2text/src-val-head.txt \ - -valid_tgt /tmp/im2text/tgt-val-head.txt \ - -shard_size 5 \ - -save_data /tmp/im2text/q >> ${LOG_FILE} 2>&1 -${PYTHON} train.py -model_type img \ - -data /tmp/im2text/q -rnn_size 2 -batch_size 10 \ - -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 >> ${LOG_FILE} 2>&1 + -src ${DATA_DIR}/morph/src.valid \ + -verbose -batch_size 10 \ + -beam_size 10 \ + -tgt ${DATA_DIR}/morph/tgt.valid \ + -out $TMP_OUT_DIR/trans_beam >> ${LOG_FILE} 2>&1 +diff ${DATA_DIR}/morph/tgt.valid $TMP_OUT_DIR/trans_beam [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/trans_beam - -echo -n "[+] Doing speech2text {preprocess + train} test..." -head /tmp/speech/src-val.txt > /tmp/speech/src-val-head.txt -head /tmp/speech/tgt-val.txt > /tmp/speech/tgt-val-head.txt -rm -rf /tmp/speech/q*pt -${PYTHON} preprocess.py -data_type audio \ - -src_dir /tmp/speech/an4_dataset \ - -train_src /tmp/speech/src-val-head.txt \ - -train_tgt /tmp/speech/tgt-val-head.txt \ - -valid_src /tmp/speech/src-val-head.txt \ - -valid_tgt /tmp/speech/tgt-val-head.txt \ - -shard_size 50 \ - -save_data /tmp/speech/q >> ${LOG_FILE} 2>&1 -${PYTHON} train.py -model_type audio \ - -data /tmp/speech/q -rnn_size 2 -batch_size 10 \ - -word_vec_size 5 -report_every 5 -rnn_size 10 -train_steps 10 >> ${LOG_FILE} 2>&1 +echo -n " [+] Testing NMT translation w/ Random Sampling..." +${PYTHON} translate.py -model ${TEST_DIR}/test_model2.pt \ + -src ${DATA_DIR}/morph/src.valid \ + -verbose -batch_size 10 \ + -beam_size 1 \ + -seed 1 \ + -random_sampling_topk=-1 \ + -random_sampling_temp=0.0001 \ + -tgt ${DATA_DIR}/morph/tgt.valid \ + -out $TMP_OUT_DIR/trans_sampling >> ${LOG_FILE} 2>&1 +diff ${DATA_DIR}/morph/tgt.valid $TMP_OUT_DIR/trans_sampling [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/trans_sampling - -echo -n "[+] Doing create vocabulary {preprocess + create_vocabulary} test..." -rm /tmp/src-train.txt -rm /tmp/tgt-train.txt -rm /tmp/src-val.txt -rm /tmp/tgt-val.txt -head ${DATA_DIR}/src-train.txt > /tmp/src-train.txt -head ${DATA_DIR}/tgt-train.txt > /tmp/tgt-train.txt -head ${DATA_DIR}/src-val.txt > /tmp/src-val.txt -head ${DATA_DIR}/tgt-val.txt > /tmp/tgt-val.txt - -rm -rf /tmp/q*pt -${PYTHON} preprocess.py -train_src /tmp/src-train.txt \ - -train_tgt /tmp/tgt-train.txt \ - -valid_src /tmp/src-val.txt \ - -valid_tgt /tmp/tgt-val.txt \ - -save_data /tmp/q >> ${LOG_FILE} 2>&1 -PYTHONPATH=${PROJECT_ROOT}:${PYTHONPATH} ${PYTHON} ./tools/create_vocabulary.py -file /tmp/q.vocab.pt \ - -file_type field -out_file /tmp/vocab.txt -side src >> ${LOG_FILE} 2>&1 +# +# Tools test +# +echo "[+] Doing tools test..." +echo -n " [+] Doing extract vocabulary test..." +PYTHONPATH=${PROJECT_ROOT}:${PYTHONPATH} ${PYTHON} ./tools/extract_vocabulary.py \ + -file $TMP_OUT_DIR/onmt.train.check.vocab.pt -file_type field -side src \ + -out_file $TMP_OUT_DIR/vocab.txt >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit -if ! wc -l /tmp/vocab.txt | grep -qF "181"; then +if ! 
wc -l $TMP_OUT_DIR/vocab.txt | grep -qF "1002"; then echo -n "wrong word count\n" >> ${LOG_FILE} - wc -l /tmp/vocab.txt >> ${LOG_FILE} + wc -l $TMP_OUT_DIR/vocab.txt >> ${LOG_FILE} error_exit fi echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/vocab.txt - -echo -n "[+] Doing embedding to torch {preprocess + embeddings_to_torch} test..." -rm /tmp/src-train.txt -rm /tmp/tgt-train.txt -rm /tmp/src-val.txt -rm /tmp/tgt-val.txt -head ${DATA_DIR}/src-train.txt > /tmp/src-train.txt -head ${DATA_DIR}/tgt-train.txt > /tmp/tgt-train.txt -head ${DATA_DIR}/src-val.txt > /tmp/src-val.txt -head ${DATA_DIR}/tgt-val.txt > /tmp/tgt-val.txt - -rm -rf /tmp/q*pt -${PYTHON} preprocess.py -train_src /tmp/src-train.txt \ - -train_tgt /tmp/tgt-train.txt \ - -valid_src /tmp/src-val.txt \ - -valid_tgt /tmp/tgt-val.txt \ - -save_data /tmp/q >> ${LOG_FILE} 2>&1 +echo -n " [+] Doing embeddings to torch test..." PYTHONPATH=${PROJECT_ROOT}:${PYTHONPATH} ${PYTHON} ./tools/embeddings_to_torch.py \ -emb_file_enc ${TEST_DIR}/sample_glove.txt \ -emb_file_dec ${TEST_DIR}/sample_glove.txt \ - -dict_file /tmp/q.vocab.pt \ - -output_file /tmp/q_gloveembeddings >> ${LOG_FILE} 2>&1 + -dict_file $TMP_OUT_DIR/onmt.train.check.vocab.pt \ + -output_file $TMP_OUT_DIR/q_gloveembeddings >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} +rm $TMP_OUT_DIR/q_gloveembeddings* - -echo -n "[+] Doing extract embeddings test..." +echo -n " [+] Doing extract embeddings test..." PYTHONPATH=${PROJECT_ROOT}:${PYTHONPATH} ${PYTHON} tools/extract_embeddings.py \ -model onmt/tests/test_model.pt >> ${LOG_FILE} 2>&1 [ "$?" -eq 0 ] || error_exit echo "Succeeded" | tee -a ${LOG_FILE} - # Finally, clean up clean_up diff --git a/onmt/tests/rebuild_test_models.sh b/onmt/tests/rebuild_test_models.sh index 7359d5bb95..8b40562c93 100755 --- a/onmt/tests/rebuild_test_models.sh +++ b/onmt/tests/rebuild_test_models.sh @@ -11,7 +11,7 @@ $my_python train.py -data data/data -save_model tmp -world_size 1 -gpu_ranks 0 - #-truncated_decoder 5 #-label_smoothing 0.1 -mv tmp*e10.pt onmt/tests/test_model.pt +mv tmp*10000.pt onmt/tests/test_model.pt rm tmp*.pt fi # @@ -24,7 +24,7 @@ $my_python preprocess.py -train_src data/src-train.txt -train_tgt data/tgt-train $my_python train.py -data data/data -save_model /tmp/tmp -world_size 1 -gpu_ranks 0 -rnn_size 256 -word_vec_size 256 -layers 2 -train_steps 10000 -optim adam -learning_rate 0.001 -encoder_type cnn -decoder_type cnn -mv /tmp/tmp*e10.pt onmt/tests/test_model.pt +mv /tmp/tmp*10000.pt onmt/tests/test_model.pt rm /tmp/tmp*.pt fi @@ -37,7 +37,7 @@ $my_python preprocess.py -train_src data/morph/src.train -train_tgt data/morph/t $my_python train.py -data data/morph/data -save_model tmp -world_size 1 -gpu_ranks 0 -rnn_size 400 -word_vec_size 100 -layers 1 -train_steps 8000 -optim adam -learning_rate 0.001 -mv tmp*e8.pt onmt/tests/test_model2.pt +mv tmp*8000.pt onmt/tests/test_model2.pt rm tmp*.pt fi @@ -53,7 +53,7 @@ $my_python train.py -data data/data -save_model /tmp/tmp -batch_type tokens -bat -max_grad_norm 0 -optim adam -decay_method noam -learning_rate 2 -label_smoothing 0.1 \ -position_encoding -param_init 0 -warmup_steps 100 -param_init_glorot -adam_beta2 0.998 # -mv /tmp/tmp*e10.pt onmt/tests/test_model.pt +mv /tmp/tmp*10000.pt onmt/tests/test_model.pt rm /tmp/tmp*.pt fi # diff --git a/onmt/tests/test_audio_dataset.py b/onmt/tests/test_audio_dataset.py deleted file mode 100644 index 535d0967fd..0000000000 --- a/onmt/tests/test_audio_dataset.py +++ /dev/null @@ -1,227 +0,0 @@ 
-# -*- coding: utf-8 -*- -import unittest -from onmt.inputters.audio_dataset import AudioSeqField, AudioDataReader - -import itertools -import os -import shutil - -import torch -import torchaudio - -from onmt.tests.utils_for_tests import product_dict - - -class TestAudioField(unittest.TestCase): - INIT_CASES = list(product_dict( - pad_index=[0, 32], - batch_first=[False, True], - include_lengths=[True, False])) - - PARAMS = list(product_dict( - batch_size=[1, 17], - max_len=[23], - full_length_seq=[0, 5, 16], - nfeats=[1, 5])) - - @classmethod - def degenerate_case(cls, init_case, params): - if params["batch_size"] < params["full_length_seq"]: - return True - return False - - @classmethod - def pad_inputs(cls, params): - lengths = torch.randint(1, params["max_len"], - (params["batch_size"],)).tolist() - lengths[params["full_length_seq"]] = params["max_len"] - fake_input = [ - torch.randn((params["nfeats"], lengths[b])) - for b in range(params["batch_size"])] - return fake_input, lengths - - @classmethod - def numericalize_inputs(cls, init_case, params): - bs = params["batch_size"] - max_len = params["max_len"] - lengths = torch.randint(1, max_len, (bs,)) - lengths[params["full_length_seq"]] = max_len - nfeats = params["nfeats"] - fake_input = torch.full( - (bs, 1, nfeats, max_len), float(init_case["pad_index"])) - for b in range(bs): - fake_input[b, :, :, :lengths[b]] = torch.randn( - (1, nfeats, lengths[b])) - if init_case["include_lengths"]: - fake_input = (fake_input, lengths) - return fake_input, lengths - - def test_pad_shape_and_lengths(self): - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params): - field = AudioSeqField(**init_case) - fake_input, lengths = self.pad_inputs(params) - outp = field.pad(fake_input) - if init_case["include_lengths"]: - outp, _ = outp - expected_shape = ( - params["batch_size"], 1, params["nfeats"], - params["max_len"]) - self.assertEqual(outp.shape, expected_shape) - - def test_pad_returns_correct_lengths(self): - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params) and \ - init_case["include_lengths"]: - field = AudioSeqField(**init_case) - fake_input, lengths = self.pad_inputs(params) - _, outp_lengths = field.pad(fake_input) - self.assertEqual(outp_lengths, lengths) - - def test_pad_pads_right_places_and_uses_correct_index(self): - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params): - field = AudioSeqField(**init_case) - fake_input, lengths = self.pad_inputs(params) - outp = field.pad(fake_input) - if init_case["include_lengths"]: - outp, _ = outp - for b in range(params["batch_size"]): - for s in range(lengths[b], params["max_len"]): - self.assertTrue( - outp[b, :, :, s].allclose( - torch.tensor(float(init_case["pad_index"])))) - - def test_numericalize_shape(self): - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params): - field = AudioSeqField(**init_case) - fake_input, lengths = self.numericalize_inputs( - init_case, params) - outp = field.numericalize(fake_input) - if init_case["include_lengths"]: - outp, _ = outp - if init_case["batch_first"]: - expected_shape = ( - params["batch_size"], 1, - params["nfeats"], params["max_len"]) - else: - expected_shape = ( - params["max_len"], params["batch_size"], - 1, params["nfeats"]) - self.assertEqual(expected_shape, 
outp.shape, - init_case.__str__()) - - def test_process_shape(self): - # tests pad and numericalize integration - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params): - field = AudioSeqField(**init_case) - fake_input, lengths = self.pad_inputs(params) - outp = field.process(fake_input) - if init_case["include_lengths"]: - outp, _ = outp - if init_case["batch_first"]: - expected_shape = ( - params["batch_size"], 1, - params["nfeats"], params["max_len"]) - else: - expected_shape = ( - params["max_len"], params["batch_size"], - 1, params["nfeats"]) - self.assertEqual(expected_shape, outp.shape, - init_case.__str__()) - - def test_process_lengths(self): - # tests pad and numericalize integration - for init_case, params in itertools.product( - self.INIT_CASES, self.PARAMS): - if not self.degenerate_case(init_case, params): - if init_case["include_lengths"]: - field = AudioSeqField(**init_case) - fake_input, lengths = self.pad_inputs(params) - lengths = torch.tensor(lengths, dtype=torch.int) - _, outp_lengths = field.process(fake_input) - self.assertTrue(outp_lengths.eq(lengths).all()) - - -class TestAudioDataReader(unittest.TestCase): - # this test touches the file system, so it could be considered an - # integration test - _THIS_DIR = os.path.dirname(os.path.abspath(__file__)) - _AUDIO_DATA_DIRNAME = "test_audio_data" - _AUDIO_DATA_DIR = os.path.join(_THIS_DIR, _AUDIO_DATA_DIRNAME) - _AUDIO_DATA_FMT = "test_noise_{:d}.wav" - _AUDIO_DATA_PATH_FMT = os.path.join(_AUDIO_DATA_DIR, _AUDIO_DATA_FMT) - - _AUDIO_LIST_DIR = "test_audio_filenames" - # file to hold full paths to audio data - _AUDIO_LIST_PATHS_FNAME = "test_files.txt" - _AUDIO_LIST_PATHS_PATH = os.path.join( - _AUDIO_LIST_DIR, _AUDIO_LIST_PATHS_FNAME) - # file to hold audio paths relative to _AUDIO_DATA_DIR (i.e. 
file names) - _AUDIO_LIST_FNAMES_FNAME = "test_fnames.txt" - _AUDIO_LIST_FNAMES_PATH = os.path.join( - _AUDIO_LIST_DIR, _AUDIO_LIST_FNAMES_FNAME) - - # it's ok if non-audio files co-exist with audio files in the data dir - _JUNK_FILE = os.path.join( - _AUDIO_DATA_DIR, "this_is_junk.txt") - - _N_EXAMPLES = 20 - _SAMPLE_RATE = 48000 - _N_CHANNELS = 2 - - @classmethod - def setUpClass(cls): - if not os.path.exists(cls._AUDIO_DATA_DIR): - os.makedirs(cls._AUDIO_DATA_DIR) - if not os.path.exists(cls._AUDIO_LIST_DIR): - os.makedirs(cls._AUDIO_LIST_DIR) - - with open(cls._JUNK_FILE, "w") as f: - f.write("this is some garbage\nShould have no impact.") - - with open(cls._AUDIO_LIST_PATHS_PATH, "w") as f_list_fnames, \ - open(cls._AUDIO_LIST_FNAMES_PATH, "w") as f_list_paths: - lengths = torch.randint(int(.5e5), int(1.5e6), (cls._N_EXAMPLES,)) - for i in range(cls._N_EXAMPLES): - # dividing gets the noise in [-1, 1] - white_noise = torch.randn((cls._N_CHANNELS, lengths[i])) / 10 - f_path = cls._AUDIO_DATA_PATH_FMT.format(i) - torchaudio.save(f_path, white_noise, cls._SAMPLE_RATE) - f_name_short = cls._AUDIO_DATA_FMT.format(i) - f_list_fnames.write(f_name_short + "\n") - f_list_paths.write(f_path + "\n") - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls._AUDIO_DATA_DIR) - shutil.rmtree(cls._AUDIO_LIST_DIR) - - def test_read_from_dir_and_data_file_containing_filenames(self): - rdr = AudioDataReader(self._SAMPLE_RATE, window="hamming", - window_size=0.02, window_stride=0.01) - i = 0 # initialize since there's a sanity check on i - for i, aud in enumerate(rdr.read( - self._AUDIO_LIST_FNAMES_PATH, "src", self._AUDIO_DATA_DIR)): - self.assertEqual(aud["src"].shape[0], 481) - self.assertEqual(aud["src_path"], - self._AUDIO_DATA_PATH_FMT.format(i)) - self.assertGreater(i, 0, "No audio data was read.") - - def test_read_from_dir_and_data_file_containing_paths(self): - rdr = AudioDataReader(self._SAMPLE_RATE, window="hamming", - window_size=0.02, window_stride=0.01) - i = 0 # initialize since there's a sanity check on i - for i, aud in enumerate(rdr.read( - self._AUDIO_LIST_PATHS_PATH, "src", self._AUDIO_DATA_DIR)): - self.assertEqual(aud["src"].shape[0], 481) - self.assertEqual(aud["src_path"], - self._AUDIO_DATA_FMT.format(i)) - self.assertGreater(i, 0, "No audio data was read.") diff --git a/onmt/tests/test_data_prepare.py b/onmt/tests/test_data_prepare.py new file mode 100644 index 0000000000..73918f02aa --- /dev/null +++ b/onmt/tests/test_data_prepare.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from __future__ import print_function + +import copy +import unittest +import glob +import os + +from onmt.utils.parse import ArgumentParser +from onmt.opts import dynamic_prepare_opts +from onmt.bin.train import prepare_fields_transforms +from onmt.constants import CorpusName + + +SAVE_DATA_PREFIX = 'data/test_data_prepare' + + +def get_default_opts(): + parser = ArgumentParser(description='data sample prepare') + dynamic_prepare_opts(parser) + + default_opts = [ + '-config', 'data/data.yaml', + '-src_vocab', 'data/vocab-train.src', + '-tgt_vocab', 'data/vocab-train.tgt' + ] + + opt = parser.parse_known_args(default_opts)[0] + # Inject some dummy training options that may be needed when building fields + opt.copy_attn = False + ArgumentParser.validate_prepare_opts(opt) + return opt + + +default_opts = get_default_opts() + + +class TestData(unittest.TestCase): + def __init__(self, *args, **kwargs): + super(TestData, self).__init__(*args, **kwargs) + self.opt = default_opts + + def 
dataset_build(self, opt): + try: + prepare_fields_transforms(opt) + except SystemExit as err: + print(err) + except IOError as err: + if opt.skip_empty_level != 'error': + raise err + else: + print(f"Caught IOError: {err}") + finally: + # Remove the generated *pt files. + for pt in glob.glob(SAVE_DATA_PREFIX + '*.pt'): + os.remove(pt) + if self.opt.save_data: + # Remove the generated data samples + sample_path = os.path.join( + os.path.dirname(self.opt.save_data), + CorpusName.SAMPLE) + if os.path.exists(sample_path): + for f in glob.glob(sample_path + '/*'): + os.remove(f) + os.rmdir(sample_path) + + +def _add_test(param_setting, methodname): + """ + Adds a Test to TestData according to settings + + Args: + param_setting: list of tuples of (param, setting) + methodname: name of the method that gets called + """ + + def test_method(self): + if param_setting: + opt = copy.deepcopy(self.opt) + for param, setting in param_setting: + setattr(opt, param, setting) + else: + opt = self.opt + getattr(self, methodname)(opt) + if param_setting: + name = 'test_' + methodname + "_" + "_".join( + str(param_setting).split()) + else: + name = 'test_' + methodname + '_standard' + setattr(TestData, name, test_method) + test_method.__name__ = name + + +test_databuild = [[], + [('src_vocab_size', 1), + ('tgt_vocab_size', 1)], + [('src_vocab_size', 10000), + ('tgt_vocab_size', 10000)], + [('src_seq_len', 1)], + [('src_seq_len', 5000)], + [('src_seq_length_trunc', 1)], + [('src_seq_length_trunc', 5000)], + [('tgt_seq_len', 1)], + [('tgt_seq_len', 5000)], + [('tgt_seq_length_trunc', 1)], + [('tgt_seq_length_trunc', 5000)], + [('copy_attn', True)], + [('share_vocab', True)], + [('n_sample', 30), + ('save_data', SAVE_DATA_PREFIX)], + [('n_sample', 30), + ('save_data', SAVE_DATA_PREFIX), + ('skip_empty_level', 'error')] + ] + +for p in test_databuild: + _add_test(p, 'dataset_build') diff --git a/onmt/tests/test_image_dataset.py b/onmt/tests/test_image_dataset.py deleted file mode 100644 index fd452ae072..0000000000 --- a/onmt/tests/test_image_dataset.py +++ /dev/null @@ -1,92 +0,0 @@ -import unittest -from onmt.inputters.image_dataset import ImageDataReader - -import os -import shutil - -import cv2 -import numpy as np -import torch - - -class TestImageDataReader(unittest.TestCase): - # this test touches the file system, so it could be considered an - # integration test - _THIS_DIR = os.path.dirname(os.path.abspath(__file__)) - _IMG_DATA_DIRNAME = "test_image_data" - _IMG_DATA_DIR = os.path.join(_THIS_DIR, _IMG_DATA_DIRNAME) - _IMG_DATA_FMT = "test_img_{:d}.png" - _IMG_DATA_PATH_FMT = os.path.join(_IMG_DATA_DIR, _IMG_DATA_FMT) - - _IMG_LIST_DIR = "test_image_filenames" - # file to hold full paths to image data - _IMG_LIST_PATHS_FNAME = "test_files.txt" - _IMG_LIST_PATHS_PATH = os.path.join( - _IMG_LIST_DIR, _IMG_LIST_PATHS_FNAME) - # file to hold image paths relative to _IMG_DATA_DIR (i.e. 
file names) - _IMG_LIST_FNAMES_FNAME = "test_fnames.txt" - _IMG_LIST_FNAMES_PATH = os.path.join( - _IMG_LIST_DIR, _IMG_LIST_FNAMES_FNAME) - - # it's ok if non-image files co-exist with image files in the data dir - _JUNK_FILE = os.path.join( - _IMG_DATA_DIR, "this_is_junk.txt") - - _N_EXAMPLES = 20 - _N_CHANNELS = 3 - - @classmethod - def setUpClass(cls): - if not os.path.exists(cls._IMG_DATA_DIR): - os.makedirs(cls._IMG_DATA_DIR) - if not os.path.exists(cls._IMG_LIST_DIR): - os.makedirs(cls._IMG_LIST_DIR) - - with open(cls._JUNK_FILE, "w") as f: - f.write("this is some garbage\nShould have no impact.") - - with open(cls._IMG_LIST_PATHS_PATH, "w") as f_list_fnames, \ - open(cls._IMG_LIST_FNAMES_PATH, "w") as f_list_paths: - cls.n_rows = torch.randint(30, 314, (cls._N_EXAMPLES,)) - cls.n_cols = torch.randint(30, 314, (cls._N_EXAMPLES,)) - for i in range(cls._N_EXAMPLES): - img = np.random.randint( - 0, 255, (cls.n_rows[i], cls.n_cols[i], cls._N_CHANNELS)) - f_path = cls._IMG_DATA_PATH_FMT.format(i) - cv2.imwrite(f_path, img) - f_name_short = cls._IMG_DATA_FMT.format(i) - f_list_fnames.write(f_name_short + "\n") - f_list_paths.write(f_path + "\n") - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls._IMG_DATA_DIR) - shutil.rmtree(cls._IMG_LIST_DIR) - - def test_read_from_dir_and_data_file_containing_filenames(self): - rdr = ImageDataReader(channel_size=self._N_CHANNELS) - i = 0 # initialize since there's a sanity check on i - for i, img in enumerate(rdr.read( - self._IMG_LIST_FNAMES_PATH, "src", self._IMG_DATA_DIR)): - self.assertEqual( - img["src"].shape, - (self._N_CHANNELS, self.n_rows[i], self.n_cols[i])) - self.assertEqual(img["src_path"], - self._IMG_DATA_PATH_FMT.format(i)) - self.assertGreater(i, 0, "No image data was read.") - - def test_read_from_dir_and_data_file_containing_paths(self): - rdr = ImageDataReader(channel_size=self._N_CHANNELS) - i = 0 # initialize since there's a sanity check on i - for i, img in enumerate(rdr.read( - self._IMG_LIST_PATHS_PATH, "src", self._IMG_DATA_DIR)): - self.assertEqual( - img["src"].shape, - (self._N_CHANNELS, self.n_rows[i], self.n_cols[i])) - self.assertEqual(img["src_path"], - self._IMG_DATA_FMT.format(i)) - self.assertGreater(i, 0, "No image data was read.") - - -class TestImageDataReader1Channel(TestImageDataReader): - _N_CHANNELS = 1 diff --git a/onmt/tests/test_model.pt b/onmt/tests/test_model.pt index daba1a88f4..a6ebd0cb01 100644 Binary files a/onmt/tests/test_model.pt and b/onmt/tests/test_model.pt differ diff --git a/onmt/tests/test_model2.pt b/onmt/tests/test_model2.pt index 67b931ecf0..f1867da325 100644 Binary files a/onmt/tests/test_model2.pt and b/onmt/tests/test_model2.pt differ diff --git a/onmt/tests/test_models.py b/onmt/tests/test_models.py index 76dc5b48b1..a7d11e32e9 100644 --- a/onmt/tests/test_models.py +++ b/onmt/tests/test_models.py @@ -1,6 +1,5 @@ import copy import unittest -import math import torch @@ -9,13 +8,11 @@ import onmt.opts from onmt.model_builder import build_embeddings, \ build_encoder, build_decoder -from onmt.encoders.image_encoder import ImageEncoder -from onmt.encoders.audio_encoder import AudioEncoder from onmt.utils.parse import ArgumentParser parser = ArgumentParser(description='train.py') onmt.opts.model_opts(parser) -onmt.opts.train_opts(parser) +onmt.opts._add_train_general_opts(parser) # -data option is required, but not used in this test, so dummy. 
opt = parser.parse_known_args(['-data', 'dummy'])[0] @@ -39,22 +36,6 @@ def get_batch(self, source_l=3, bsize=1): test_length = torch.ones(bsize).fill_(source_l).long() return test_src, test_tgt, test_length - def get_batch_image(self, tgt_l=3, bsize=1, h=15, w=17): - # batch x c x h x w - test_src = torch.ones(bsize, 3, h, w).float() - test_tgt = torch.ones(tgt_l, bsize, 1).long() - test_length = None - return test_src, test_tgt, test_length - - def get_batch_audio(self, tgt_l=7, bsize=3, sample_rate=5500, - window_size=0.03, t=37): - # batch x 1 x nfft x t - nfft = int(math.floor((sample_rate * window_size) / 2) + 1) - test_src = torch.ones(bsize, 1, nfft, t).float() - test_tgt = torch.ones(tgt_l, bsize, 1).long() - test_length = torch.ones(bsize).long().fill_(tgt_l) - return test_src, test_tgt, test_length - def embeddings_forward(self, opt, source_l=3, bsize=1): ''' Tests if the embeddings works as expected @@ -140,78 +121,6 @@ def nmtmodel_forward(self, opt, source_l=3, bsize=1): self.assertEqual(outputs.size(), outputsize.size()) self.assertEqual(type(outputs), torch.Tensor) - def imagemodel_forward(self, opt, tgt_l=2, bsize=1, h=15, w=17): - """ - Creates an image-to-text nmtmodel with a custom opt function. - Forwards a testbatch and checks output size. - - Args: - opt: Namespace with options - source_l: length of input sequence - bsize: batchsize - """ - if opt.encoder_type == 'transformer' or opt.encoder_type == 'cnn': - return - - word_field = self.get_field() - - enc = ImageEncoder( - opt.enc_layers, opt.brnn, opt.enc_rnn_size, - opt.dropout) - - embeddings = build_embeddings(opt, word_field, for_encoder=False) - dec = build_decoder(opt, embeddings) - - model = onmt.models.model.NMTModel(enc, dec) - - test_src, test_tgt, test_length = self.get_batch_image( - h=h, w=w, - bsize=bsize, - tgt_l=tgt_l) - outputs, attn = model(test_src, test_tgt, test_length) - outputsize = torch.zeros(tgt_l - 1, bsize, opt.dec_rnn_size) - # Make sure that output has the correct size and type - self.assertEqual(outputs.size(), outputsize.size()) - self.assertEqual(type(outputs), torch.Tensor) - - def audiomodel_forward(self, opt, tgt_l=7, bsize=3, t=37): - """ - Creates a speech-to-text nmtmodel with a custom opt function. - Forwards a testbatch and checks output size. 
- - Args: - opt: Namespace with options - source_l: length of input sequence - bsize: batchsize - """ - if opt.encoder_type == 'transformer' or opt.encoder_type == 'cnn': - return - if opt.rnn_type == 'SRU': - return - - word_field = self.get_field() - - enc = AudioEncoder(opt.rnn_type, opt.enc_layers, opt.dec_layers, - opt.brnn, opt.enc_rnn_size, opt.dec_rnn_size, - opt.audio_enc_pooling, opt.dropout, - opt.sample_rate, opt.window_size) - - embeddings = build_embeddings(opt, word_field, for_encoder=False) - dec = build_decoder(opt, embeddings) - - model = onmt.models.model.NMTModel(enc, dec) - - test_src, test_tgt, test_length = self.get_batch_audio( - bsize=bsize, - sample_rate=opt.sample_rate, - window_size=opt.window_size, - t=t, tgt_l=tgt_l) - outputs, attn = model(test_src, test_tgt, test_length) - outputsize = torch.zeros(tgt_l - 1, bsize, opt.dec_rnn_size) - # Make sure that output has the correct size and type - self.assertEqual(outputs.size(), outputsize.size()) - self.assertEqual(type(outputs), torch.Tensor) - def _add_test(param_setting, methodname): """ @@ -305,20 +214,3 @@ def test_method(self): for p in tests_nmtmodel: _add_test(p, 'nmtmodel_forward') - -for p in tests_nmtmodel: - _add_test(p, 'imagemodel_forward') - -for p in tests_nmtmodel: - p.append(('sample_rate', 5500)) - p.append(('window_size', 0.03)) - # when reasonable, set audio_enc_pooling to 2 - for arg, val in p: - if arg == "layers" and int(val) > 2: - # Need lengths >= audio_enc_pooling**n_layers. - # That condition is unrealistic for large n_layers, - # so leave audio_enc_pooling at 1. - break - else: - p.append(('audio_enc_pooling', '2')) - _add_test(p, 'audiomodel_forward') diff --git a/onmt/tests/test_preprocess.py b/onmt/tests/test_preprocess.py deleted file mode 100644 index b3150ec6c4..0000000000 --- a/onmt/tests/test_preprocess.py +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -from __future__ import print_function - -import configargparse -import copy -import unittest -import glob -import os -import codecs - -import onmt -import onmt.inputters -import onmt.opts -import onmt.bin.preprocess as preprocess - - -parser = configargparse.ArgumentParser(description='preprocess.py') -onmt.opts.preprocess_opts(parser) - -SAVE_DATA_PREFIX = 'data/test_preprocess' - -default_opts = [ - '-data_type', 'text', - '-train_src', 'data/src-train.txt', - '-train_tgt', 'data/tgt-train.txt', - '-valid_src', 'data/src-val.txt', - '-valid_tgt', 'data/tgt-val.txt', - '-save_data', SAVE_DATA_PREFIX -] - -opt = parser.parse_known_args(default_opts)[0] - - -class TestData(unittest.TestCase): - def __init__(self, *args, **kwargs): - super(TestData, self).__init__(*args, **kwargs) - self.opt = opt - - def dataset_build(self, opt): - fields = onmt.inputters.get_fields("text", 0, 0) - - if hasattr(opt, 'src_vocab') and len(opt.src_vocab) > 0: - with codecs.open(opt.src_vocab, 'w', 'utf-8') as f: - f.write('a\nb\nc\nd\ne\nf\n') - if hasattr(opt, 'tgt_vocab') and len(opt.tgt_vocab) > 0: - with codecs.open(opt.tgt_vocab, 'w', 'utf-8') as f: - f.write('a\nb\nc\nd\ne\nf\n') - - src_reader = onmt.inputters.str2reader[opt.data_type].from_opt(opt) - tgt_reader = onmt.inputters.str2reader["text"].from_opt(opt) - align_reader = onmt.inputters.str2reader["text"].from_opt(opt) - preprocess.build_save_dataset( - 'train', fields, src_reader, tgt_reader, align_reader, opt) - - preprocess.build_save_dataset( - 'valid', fields, src_reader, tgt_reader, align_reader, opt) - - # Remove the generated *pt files. 
- for pt in glob.glob(SAVE_DATA_PREFIX + '*.pt'): - os.remove(pt) - if hasattr(opt, 'src_vocab') and os.path.exists(opt.src_vocab): - os.remove(opt.src_vocab) - if hasattr(opt, 'tgt_vocab') and os.path.exists(opt.tgt_vocab): - os.remove(opt.tgt_vocab) - - -def _add_test(param_setting, methodname): - """ - Adds a Test to TestData according to settings - - Args: - param_setting: list of tuples of (param, setting) - methodname: name of the method that gets called - """ - - def test_method(self): - if param_setting: - opt = copy.deepcopy(self.opt) - for param, setting in param_setting: - setattr(opt, param, setting) - else: - opt = self.opt - getattr(self, methodname)(opt) - if param_setting: - name = 'test_' + methodname + "_" + "_".join( - str(param_setting).split()) - else: - name = 'test_' + methodname + '_standard' - setattr(TestData, name, test_method) - test_method.__name__ = name - - -test_databuild = [[], - [('src_vocab_size', 1), - ('tgt_vocab_size', 1)], - [('src_vocab_size', 10000), - ('tgt_vocab_size', 10000)], - [('src_seq_len', 1)], - [('src_seq_len', 5000)], - [('src_seq_length_trunc', 1)], - [('src_seq_length_trunc', 5000)], - [('tgt_seq_len', 1)], - [('tgt_seq_len', 5000)], - [('tgt_seq_length_trunc', 1)], - [('tgt_seq_length_trunc', 5000)], - [('shuffle', 0)], - [('lower', True)], - [('dynamic_dict', True)], - [('share_vocab', True)], - [('dynamic_dict', True), - ('share_vocab', True)], - [('dynamic_dict', True), - ('shard_size', 500000)], - [('src_vocab', '/tmp/src_vocab.txt'), - ('tgt_vocab', '/tmp/tgt_vocab.txt')], - ] - -for p in test_databuild: - _add_test(p, 'dataset_build') - -# Test image preprocessing -test_databuild = [[], - [('tgt_vocab_size', 1)], - [('tgt_vocab_size', 10000)], - [('tgt_seq_len', 1)], - [('tgt_seq_len', 5000)], - [('tgt_seq_length_trunc', 1)], - [('tgt_seq_length_trunc', 5000)], - [('shuffle', 0)], - [('lower', True)], - [('shard_size', 5)], - [('shard_size', 50)], - [('tgt_vocab', '/tmp/tgt_vocab.txt')], - ] -test_databuild_common = [('data_type', 'img'), - ('src_dir', '/tmp/im2text/images'), - ('train_src', ['/tmp/im2text/src-train-head.txt']), - ('train_tgt', ['/tmp/im2text/tgt-train-head.txt']), - ('valid_src', '/tmp/im2text/src-val-head.txt'), - ('valid_tgt', '/tmp/im2text/tgt-val-head.txt'), - ] -for p in test_databuild: - _add_test(p + test_databuild_common, 'dataset_build') - -# Test audio preprocessing -test_databuild = [[], - [('tgt_vocab_size', 1)], - [('tgt_vocab_size', 10000)], - [('src_seq_len', 1)], - [('src_seq_len', 5000)], - [('src_seq_length_trunc', 3200)], - [('src_seq_length_trunc', 5000)], - [('tgt_seq_len', 1)], - [('tgt_seq_len', 5000)], - [('tgt_seq_length_trunc', 1)], - [('tgt_seq_length_trunc', 5000)], - [('shuffle', 0)], - [('lower', True)], - [('shard_size', 5)], - [('shard_size', 50)], - [('tgt_vocab', '/tmp/tgt_vocab.txt')], - ] -test_databuild_common = [('data_type', 'audio'), - ('src_dir', '/tmp/speech/an4_dataset'), - ('train_src', ['/tmp/speech/src-train-head.txt']), - ('train_tgt', ['/tmp/speech/tgt-train-head.txt']), - ('valid_src', '/tmp/speech/src-val-head.txt'), - ('valid_tgt', '/tmp/speech/tgt-val-head.txt'), - ('sample_rate', 16000), - ('window_size', 0.04), - ('window_stride', 0.02), - ('window', 'hamming'), - ] -for p in test_databuild: - _add_test(p + test_databuild_common, 'dataset_build') diff --git a/onmt/train_single.py b/onmt/train_single.py old mode 100755 new mode 100644 index f9702c3f8e..4dc96c273b --- a/onmt/train_single.py +++ b/onmt/train_single.py @@ -1,11 +1,8 @@ #!/usr/bin/env python 
"""Training on a single process.""" -import os - import torch -from onmt.inputters.inputter import build_dataset_iter, patch_fields, \ - load_old_vocab, old_style_vocab, build_dataset_iter_multiple, IterOnDevice +from onmt.inputters.inputter import IterOnDevice from onmt.model_builder import build_model from onmt.utils.optimizers import Optimizer from onmt.utils.misc import set_random_seed @@ -14,23 +11,7 @@ from onmt.utils.logging import init_logger, logger from onmt.utils.parse import ArgumentParser - -def _check_save_model_path(opt): - save_model_path = os.path.abspath(opt.save_model) - model_dirname = os.path.dirname(save_model_path) - if not os.path.exists(model_dirname): - os.makedirs(model_dirname) - - -def _tally_parameters(model): - enc = 0 - dec = 0 - for name, param in model.named_parameters(): - if 'encoder' in name: - enc += param.nelement() - else: - dec += param.nelement() - return enc + dec, enc, dec +from onmt.inputters.dynamic_iterator import build_dynamic_dataset_iter def configure_process(opt, device_id): @@ -39,57 +20,45 @@ def configure_process(opt, device_id): set_random_seed(opt.seed, device_id >= 0) -def main(opt, device_id, batch_queue=None, semaphore=None): - # NOTE: It's important that ``opt`` has been validated and updated - # at this point. - configure_process(opt, device_id) - init_logger(opt.log_file) - assert len(opt.accum_count) == len(opt.accum_steps), \ - 'Number of accum_count values must match number of accum_steps' - # Load checkpoint if we resume from a previous training. - if opt.train_from: - logger.info('Loading checkpoint from %s' % opt.train_from) - checkpoint = torch.load(opt.train_from, - map_location=lambda storage, loc: storage) +def _get_model_opts(opt, checkpoint=None): + """Get `model_opt` to build model, may load from `checkpoint` if any.""" + if checkpoint is not None: model_opt = ArgumentParser.ckpt_model_opts(checkpoint["opt"]) ArgumentParser.update_model_opts(model_opt) ArgumentParser.validate_model_opts(model_opt) - logger.info('Loading vocab from checkpoint at %s.' % opt.train_from) - vocab = checkpoint['vocab'] else: - checkpoint = None model_opt = opt - vocab = torch.load(opt.data + '.vocab.pt') + return model_opt - # check for code where vocab is saved instead of fields - # (in the future this will be done in a smarter way) - if old_style_vocab(vocab): - fields = load_old_vocab( - vocab, opt.model_type, dynamic_dict=opt.copy_attn) - else: - fields = vocab - - # patch for fields that may be missing in old data/model - patch_fields(opt, fields) - - # Report src and tgt vocab sizes, including for features - for side in ['src', 'tgt']: - f = fields[side] - try: - f_iter = iter(f) - except TypeError: - f_iter = [(side, f)] - for sn, sf in f_iter: - if sf.use_vocab: - logger.info(' * %s vocab size = %d' % (sn, len(sf.vocab))) + +def _build_valid_iter(opt, fields, transforms_cls): + """Build iterator used for validation.""" + valid_iter = build_dynamic_dataset_iter( + fields, transforms_cls, opt, is_train=False) + return valid_iter + + +def _build_train_iter(opt, fields, transforms_cls, stride=1, offset=0): + """Build training iterator.""" + train_iter = build_dynamic_dataset_iter( + fields, transforms_cls, opt, is_train=True, + stride=stride, offset=offset) + return train_iter + + +def main(opt, fields, transforms_cls, checkpoint, device_id, + batch_queue=None, semaphore=None): + """Start training on `device_id`.""" + # NOTE: It's important that ``opt`` has been validated and updated + # at this point. 
+ configure_process(opt, device_id) + init_logger(opt.log_file) + + model_opt = _get_model_opts(opt, checkpoint=checkpoint) # Build model. model = build_model(model_opt, opt, fields, checkpoint) - n_params, enc, dec = _tally_parameters(model) - logger.info('encoder: %d' % enc) - logger.info('decoder: %d' % dec) - logger.info('* number of parameters: %d' % n_params) - _check_save_model_path(opt) + model.count_parameters(log=logger.info) # Build optimizer. optim = Optimizer.from_opt(model, opt, checkpoint=checkpoint) @@ -101,19 +70,8 @@ def main(opt, device_id, batch_queue=None, semaphore=None): opt, device_id, model, fields, optim, model_saver=model_saver) if batch_queue is None: - if len(opt.data_ids) > 1: - train_shards = [] - for train_id in opt.data_ids: - shard_base = "train_" + train_id - train_shards.append(shard_base) - train_iter = build_dataset_iter_multiple(train_shards, fields, opt) - else: - if opt.data_ids[0] is not None: - shard_base = "train_" + opt.data_ids[0] - else: - shard_base = "train" - train_iter = build_dataset_iter(shard_base, fields, opt) - train_iter = IterOnDevice(train_iter, device_id) + _train_iter = _build_train_iter(opt, fields, transforms_cls) + train_iter = IterOnDevice(_train_iter, device_id) else: assert semaphore is not None, \ "Using batch_queue requires semaphore as well" @@ -128,8 +86,7 @@ def _train_iter(): train_iter = _train_iter() - valid_iter = build_dataset_iter( - "valid", fields, opt, is_train=False) + valid_iter = _build_valid_iter(opt, fields, transforms_cls) if valid_iter is not None: valid_iter = IterOnDevice(valid_iter, device_id) diff --git a/onmt/trainer.py b/onmt/trainer.py index 334ab02f12..16810348f0 100644 --- a/onmt/trainer.py +++ b/onmt/trainer.py @@ -26,7 +26,7 @@ def build_trainer(opt, device_id, model, fields, optim, model_saver=None): fields (dict): dict of fields optim (:obj:`onmt.utils.Optimizer`): optimizer used during training data_type (str): string describing the type of data - e.g. "text", "img", "audio" + e.g. 
"text" model_saver(:obj:`onmt.models.ModelSaverBase`): the utility object used to save the model """ @@ -49,7 +49,7 @@ def build_trainer(opt, device_id, model, fields, optim, model_saver=None): if device_id >= 0: gpu_rank = opt.gpu_ranks[device_id] else: - gpu_rank = 0 + gpu_rank = -1 n_gpu = 0 gpu_verbose_level = opt.gpu_verbose_level @@ -57,24 +57,6 @@ def build_trainer(opt, device_id, model, fields, optim, model_saver=None): opt.early_stopping, scorers=onmt.utils.scorers_from_opts(opt)) \ if opt.early_stopping > 0 else None - source_noise = None - if len(opt.src_noise) > 0: - src_field = dict(fields)["src"].base_field - corpus_id_field = dict(fields).get("corpus_id", None) - if corpus_id_field is not None: - ids_to_noise = corpus_id_field.numericalize(opt.data_to_noise) - else: - ids_to_noise = None - source_noise = onmt.modules.source_noise.MultiNoise( - opt.src_noise, - opt.src_noise_prob, - ids_to_noise=ids_to_noise, - pad_idx=src_field.pad_token, - end_of_sentence_mask=src_field.end_of_sentence_mask, - word_start_mask=src_field.word_start_mask, - device_id=device_id - ) - report_manager = onmt.utils.build_report_manager(opt, gpu_rank) trainer = onmt.Trainer(model, train_loss, valid_loss, optim, trunc_size, shard_size, norm_method, @@ -82,14 +64,13 @@ def build_trainer(opt, device_id, model, fields, optim, model_saver=None): n_gpu, gpu_rank, gpu_verbose_level, report_manager, with_align=True if opt.lambda_align > 0 else False, - model_saver=model_saver if gpu_rank == 0 else None, + model_saver=model_saver if gpu_rank <= 0 else None, average_decay=average_decay, average_every=average_every, model_dtype=opt.model_dtype, earlystopper=earlystopper, dropout=dropout, - dropout_steps=dropout_steps, - source_noise=source_noise) + dropout_steps=dropout_steps) return trainer @@ -108,7 +89,7 @@ class Trainer(object): the optimizer responsible for update trunc_size(int): length of truncated back propagation through time shard_size(int): compute loss in shards of this size for efficiency - data_type(string): type of the source input: [text|img|audio] + data_type(string): type of the source input: [text] norm_method(string): normalization methods: [sents|tokens] accum_count(list): accumulate gradients this many times. accum_steps(list): steps for accum gradients changes. @@ -126,8 +107,7 @@ def __init__(self, model, train_loss, valid_loss, optim, n_gpu=1, gpu_rank=1, gpu_verbose_level=0, report_manager=None, with_align=False, model_saver=None, average_decay=0, average_every=1, model_dtype='fp32', - earlystopper=None, dropout=[0.3], dropout_steps=[0], - source_noise=None): + earlystopper=None, dropout=[0.3], dropout_steps=[0]): # Basic attributes. 
         self.model = model
         self.train_loss = train_loss
@@ -152,7 +132,6 @@ def __init__(self, model, train_loss, valid_loss, optim,
         self.earlystopper = earlystopper
         self.dropout = dropout
         self.dropout_steps = dropout_steps
-        self.source_noise = source_noise
 
         for i in range(len(self.accum_count_l)):
             assert self.accum_count_l[i] > 0
@@ -367,8 +346,6 @@ def _gradient_accumulation(self, true_batches, normalization, total_stats,
             else:
                 trunc_size = target_size
 
-            batch = self.maybe_noise_source(batch)
-
             src, src_lengths = batch.src if isinstance(batch.src, tuple) \
                 else (batch.src, None)
             if src_lengths is not None:
@@ -496,8 +473,3 @@ def _report_step(self, learning_rate, step, train_stats=None,
                 else self.earlystopper.current_tolerance, step,
                 train_stats=train_stats,
                 valid_stats=valid_stats)
-
-    def maybe_noise_source(self, batch):
-        if self.source_noise is not None:
-            return self.source_noise(batch)
-        return batch
diff --git a/onmt/transforms/__init__.py b/onmt/transforms/__init__.py
new file mode 100644
index 0000000000..1834377a12
--- /dev/null
+++ b/onmt/transforms/__init__.py
@@ -0,0 +1,51 @@
+"""Module for dynamic data transforms."""
+import os
+import importlib
+
+from .transform import make_transforms, get_specials,\
+    save_transforms, load_transforms, TransformPipe,\
+    Transform
+
+
+AVAILABLE_TRANSFORMS = {}
+
+
+def get_transforms_cls(transform_names):
+    """Return valid transform classes indicated in `transform_names`."""
+    transforms_cls = {}
+    for name in transform_names:
+        if name not in AVAILABLE_TRANSFORMS:
+            raise ValueError("specified transform not supported!")
+        transforms_cls[name] = AVAILABLE_TRANSFORMS[name]
+    return transforms_cls
+
+
+__all__ = ["get_transforms_cls", "get_specials", "make_transforms",
+           "load_transforms", "save_transforms", "TransformPipe"]
+
+
+def register_transform(name):
+    """Transform registry decorator that can be used to add a new transform class."""
+
+    def register_transform_cls(cls):
+        if name in AVAILABLE_TRANSFORMS:
+            raise ValueError(
+                'Cannot register duplicate transform ({})'.format(name))
+        if not issubclass(cls, Transform):
+            raise ValueError('transform ({}: {}) must extend Transform'.format(
+                name, cls.__name__))
+        AVAILABLE_TRANSFORMS[name] = cls
+        return cls
+
+    return register_transform_cls
+
+
+# Auto import python files in this directory
+transform_dir = os.path.dirname(__file__)
+for file in os.listdir(transform_dir):
+    path = os.path.join(transform_dir, file)
+    if not file.startswith('_') and not file.startswith('.') and (
+            file.endswith('.py') or os.path.isdir(path)):
+        file_name = file[:file.find('.py')] if file.endswith('.py') else file
+        module = importlib.import_module(
+            'onmt.transforms.' + file_name)
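For orientation, a minimal sketch of how this registry is meant to be used. The `uppercase` transform below is invented purely for illustration; `register_transform`, `get_transforms_cls` and `Transform` are the names defined in this patch:

from onmt.transforms import register_transform, get_transforms_cls
from onmt.transforms.transform import Transform

@register_transform(name='uppercase')
class UppercaseTransform(Transform):
    """Toy transform: uppercase every src/tgt token."""

    def apply(self, example, is_train=False, stats=None, **kwargs):
        example['src'] = [tok.upper() for tok in example['src']]
        example['tgt'] = [tok.upper() for tok in example['tgt']]
        return example

# Lookup by name returns the registered class, ready to instantiate:
transforms_cls = get_transforms_cls(['uppercase'])
assert transforms_cls['uppercase'] is UppercaseTransform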
diff --git a/onmt/transforms/bart.py b/onmt/transforms/bart.py
new file mode 100644
index 0000000000..7f264325b3
--- /dev/null
+++ b/onmt/transforms/bart.py
@@ -0,0 +1,376 @@
+"""Transforms related to noising from BART, based on fairseq's code."""
+import math
+import numpy as np
+import torch
+from functools import partial
+from onmt.constants import DefaultTokens, SubwordMarker
+from onmt.transforms import register_transform
+from .transform import Transform
+
+
+def word_start(x, ignore_subword=False, is_joiner=False):
+    """Return whether token `x` is a word start."""
+    if not ignore_subword:
+        if is_joiner:
+            return not x.startswith(SubwordMarker.JOINER)
+        else:
+            return x.startswith(SubwordMarker.SPACER)
+    else:
+        return True
+
+
+class BARTNoising(object):
+    """Noise from BART."""
+
+    def __init__(self, vocab, mask_tok=DefaultTokens.MASK, mask_ratio=0.0,
+                 insert_ratio=0.0, permute_sent_ratio=0.0, poisson_lambda=3.0,
+                 replace_length=-1, rotate_ratio=0.5, mask_length='subword',
+                 random_ratio=0.0, is_joiner=False,
+                 full_stop_token=DefaultTokens.SENT_FULL_STOPS):
+        self.vocab = vocab
+
+        self.mask_tok = mask_tok
+
+        self.mask_ratio = mask_ratio
+        self.random_ratio = random_ratio
+        self.insert_ratio = insert_ratio
+        self.rotate_ratio = rotate_ratio
+        self.permute_sent_ratio = permute_sent_ratio
+
+        self.full_stop_token = full_stop_token
+
+        # -1: keep everything (i.e. 1 mask per token)
+        #  0: replace everything (i.e. no mask)
+        #  1: 1 mask per span
+        if replace_length not in [-1, 0, 1]:
+            raise ValueError(f'invalid arg: replace_length={replace_length}')
+        self.replace_length = replace_length
+
+        if mask_length not in ['subword', 'word', 'span-poisson']:
+            raise ValueError(f'invalid arg: mask-length={mask_length}')
+        if mask_length == 'subword' and replace_length not in [0, 1]:
+            raise ValueError('if using subwords, use replace-length=1 or 0')
+
+        if mask_length == 'subword' or is_joiner is None:
+            # view each subword as word start / input is word level token
+            self.__is_word_start = partial(word_start, ignore_subword=True)
+        else:
+            self.__is_word_start = partial(word_start, is_joiner=is_joiner)
+
+        self.mask_span_distribution = None
+        if mask_length == 'span-poisson':
+            self.mask_span_distribution = self._make_poisson(poisson_lambda)
+        self.mask_length = mask_length
+        self.poisson_lambda = poisson_lambda
+
+    def _make_poisson(self, poisson_lambda):
+        lambda_to_the_k = 1
+        e_to_the_minus_lambda = math.exp(-poisson_lambda)
+        k_factorial = 1
+        ps = []
+        for k in range(0, 128):
+            ps.append(e_to_the_minus_lambda * lambda_to_the_k / k_factorial)
+            lambda_to_the_k *= poisson_lambda
+            k_factorial *= (k + 1)
+            if ps[-1] < 0.0000001:
+                break
+        ps = torch.FloatTensor(ps)
+        return torch.distributions.Categorical(ps)
+
+    def _is_full_stop(self, token):
+        return True if token in self.full_stop_token else False
+
+    def permute_sentences(self, tokens, p=1.0):
+        if len(tokens) == 1:
+            return tokens
+        full_stops = np.array([self._is_full_stop(token) for token in tokens])
+        # Pretend it ends with a full stop so last span is a sentence
+        full_stops[-1] = True
+
+        # Tokens that are full stops, where the previous token is not
+        sentence_ends = (full_stops[1:] * ~full_stops[:-1]).nonzero()[0] + 2
+
+        n_sentences = sentence_ends.size
+        if n_sentences == 1:
+            return tokens
+
+        n_to_permute = math.ceil((n_sentences * 2 * p) / 2.0)
+
+        substitutions = np.random.permutation(n_sentences)[:n_to_permute]
+        ordering = np.arange(0, n_sentences)
+        ordering[substitutions] = substitutions[np.random.permutation(
+            n_to_permute)]
+
+        result = [tok for tok in tokens]
+        index = 0
+        for i in ordering:
+            sentence = tokens[(sentence_ends[i - 1] if i > 0 else 0):
+                              sentence_ends[i]]
+            result[index:index + len(sentence)] = sentence
+            index += len(sentence)
+        assert len(result) == len(tokens), "Error when permuting sentences."
+        return result
+
+    def _is_word_start(self, token):
+        return self.__is_word_start(token)
+
+    def whole_word_mask(self, tokens, p=1.0):  # text span mask/infilling
+        is_word_start = torch.tensor(
+            [self._is_word_start(token) for token in tokens]).int()
+        n_mask = int(math.ceil(is_word_start.sum() * p))
+        n_insert = 0
+        if n_mask == 0:
+            return tokens
+
+        if self.mask_span_distribution is not None:  # Text (span) Infilling
+            lengths = self.mask_span_distribution.sample(
+                sample_shape=(n_mask,))
+
+            # Make sure we have enough to mask
+            cum_length = torch.cumsum(lengths, 0)
+            while cum_length[-1] < n_mask:
+                lengths = torch.cat([
+                    lengths,
+                    self.mask_span_distribution.sample(
+                        sample_shape=(n_mask,))
+                ], dim=0)
+                cum_length = torch.cumsum(lengths, 0)
+
+            # Trim to masking budget
+            i = 0
+            while cum_length[i] < n_mask:
+                i += 1
+            lengths[i] = n_mask - (0 if i == 0 else cum_length[i - 1])
+            n_mask = i + 1
+            lengths = lengths[:n_mask]
+
+            # Handle 0-length mask (inserts) separately
+            lengths = lengths[lengths > 0]
+            n_insert = n_mask - lengths.size(0)
+            n_mask -= n_insert
+            if n_mask == 0:
+                return self.insertion_noise(tokens, n_insert / len(tokens))
+
+            assert (lengths > 0).all()
+        else:  # Token Masking
+            lengths = torch.ones((n_mask,)).long()
+        # assert is_word_start[-1] == 0
+        word_starts = is_word_start.nonzero(as_tuple=False)
+        indices = word_starts[torch.randperm(word_starts.size(0))[
+            :n_mask]].squeeze(1)
+        mask_random = torch.FloatTensor(n_mask).uniform_() < self.random_ratio
+
+        tokens_length = len(tokens)
+        # assert tokens_length - 1 not in indices
+        to_keep = torch.ones(tokens_length, dtype=torch.bool)
+
+        if self.replace_length == 0:
+            to_keep[indices] = 0
+        else:
+            # keep index, but replace it with [MASK]
+            for i in indices:
+                tokens[i] = self.mask_tok
+            random_toks = torch.randint(
+                0, len(self.vocab), size=(mask_random.sum(),))
+            for i, rand_tok in zip(indices[mask_random], random_toks):
+                tokens[i] = rand_tok
+
+        if tokens_length - 1 in indices:
+            uncompleted = (indices != tokens_length - 1)
+            indices = indices[uncompleted]
+            mask_random = mask_random[uncompleted]
+            lengths = lengths[uncompleted]
+
+        # acts as a long length, so spans don't go over the end of doc
+        is_word_start[-1] = 255
+
+        if self.mask_span_distribution is not None:
+            assert len(lengths.size()) == 1
+            assert lengths.size() == indices.size()
+            lengths -= 1  # 1 for the position already masked
+            while indices.size(0) > 0:
+                assert lengths.size() == indices.size()
+                # next position from each word_start
+                lengths -= is_word_start[indices + 1].long()
+                uncompleted = lengths >= 0
+                indices = indices[uncompleted] + 1
+                mask_random = mask_random[uncompleted]
+                lengths = lengths[uncompleted]
+                if self.replace_length != -1:
+                    # delete token: 1 mask/remove per span
+                    to_keep[indices] = 0
+                else:
+                    # keep index, but replace it with [MASK]: 1 mask per token
+                    for i in indices:
+                        tokens[i] = self.mask_tok
+                    random_toks = torch.randint(
+                        0, len(self.vocab), size=(mask_random.sum(),))
+                    for i, rand_tok in zip(indices[mask_random], random_toks):
+                        tokens[i] = rand_tok
+        else:
+            # A bit faster when all lengths are 1
+            while indices.size(0) > 0:
+                # to cover whole token
+                uncompleted = is_word_start[indices + 1] == 0
+                indices = indices[uncompleted] + 1
+                mask_random = mask_random[uncompleted]
+                if self.replace_length != -1:
+                    # delete token
+                    to_keep[indices] = 0
+                else:
+                    # keep index, but replace it with [MASK]
+                    for i in indices:
+                        tokens[i] = self.mask_tok
+                    random_toks = torch.randint(
+                        0, len(self.vocab), size=(mask_random.sum(),))
+                    for i, rand_tok in zip(indices[mask_random], random_toks):
+                        tokens[i] = rand_tok
+
+        # assert tokens_length - 1 not in indices
+
+        tokens = [tok for tok, keep in zip(tokens, to_keep)
+                  if keep.item() is True]
+
+        if n_insert > 0:
+            tokens = self.insertion_noise(tokens, n_insert / len(tokens))
+
+        return tokens
+
+    def insertion_noise(self, tokens, p=1.0):
+        if p == 0.0:
+            return tokens
+
+        n_tokens = len(tokens)
+        n_insert = int(math.ceil(n_tokens * p))
+        n_random = int(math.ceil(n_insert * self.random_ratio))
+
+        noise_indices = np.random.permutation(n_tokens + n_insert)[:n_insert]
+        noise_mask = np.zeros(shape=(n_tokens + n_insert,), dtype=bool)
+        noise_mask[noise_indices] = 1
+
+        result = np.empty(shape=(n_tokens + n_insert,), dtype=object)
+        result[noise_indices[n_random:]] = self.mask_tok
+        if n_random > 0:
+            result[noise_indices[:n_random]] = np.random.choice(
+                self.vocab, size=n_random)
+        result[~noise_mask] = tokens
+
+        assert all([item is not None for item in result]),\
+            "Error when inserting noise."
+        return [tok for tok in result]
+
+    def rolling_noise(self, tokens, p=1.0):
+        if np.random.random() >= p:
+            return tokens
+        offset = np.random.randint(0, max(1, len(tokens) - 1) + 1)
+        return tokens[offset:] + tokens[0:offset]
+
+    def apply(self, tokens):
+        if self.vocab is None:
+            raise ValueError(
+                "Injecting BART noise requires a valid vocabulary.")
+
+        if self.permute_sent_ratio > 0.0:
+            tokens = self.permute_sentences(tokens, self.permute_sent_ratio)
+
+        if self.mask_ratio > 0.0:
+            tokens = self.whole_word_mask(tokens, self.mask_ratio)
+
+        if self.insert_ratio > 0.0:
+            tokens = self.insertion_noise(tokens, self.insert_ratio)
+
+        if self.rotate_ratio > 0.0:
+            tokens = self.rolling_noise(tokens, self.rotate_ratio)
+        return tokens
+
+    def __repr__(self):
+        cls_name = type(self).__name__
+        kwargs = {}
+        if self.permute_sent_ratio > 0.0:
+            kwargs['permute_sent_ratio'] = self.permute_sent_ratio
+            kwargs['full_stop_token'] = self.full_stop_token
+        if self.insert_ratio > 0.0:
+            kwargs['insert_ratio'] = self.insert_ratio
+        if self.rotate_ratio > 0.0:
+            kwargs['rotate_ratio'] = self.rotate_ratio
+        if self.random_ratio > 0.0:
+            kwargs['random_ratio'] = self.random_ratio
+        if self.mask_ratio > 0.0:
+            kwargs['mask_ratio'] = self.mask_ratio
+            kwargs['mask_length'] = self.mask_length
+            kwargs['poisson_lambda'] = self.poisson_lambda
+            kwargs['replace_length'] = self.replace_length
+        cls_args = ', '.join(
+            [f'{kw}={arg}' for kw, arg in kwargs.items()])
+        return '{}({})'.format(cls_name, cls_args)
+
+
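A minimal sketch of driving BARTNoising by hand, assuming a toy vocabulary and sentence (both invented). `replace_length=1` is required here because the default `mask_length` is 'subword', and the printed mask string is whatever `DefaultTokens.MASK` resolves to:

noiser = BARTNoising(
    vocab=['the', 'cat', 'sat', 'on', 'mat', '.'],  # toy vocab
    mask_ratio=0.3,     # corrupt ~30% of (sub)words
    replace_length=1,   # one mask token per masked span
    rotate_ratio=0.0,   # disable rolling noise for a simpler demo
)
tokens = ['the', 'cat', 'sat', 'on', 'the', 'mat', '.']
print(noiser.apply(tokens))
# e.g. ['the', '<mask>', 'sat', 'on', '<mask>', 'mat', '.']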
"-insert_ratio", type=float, default=0.0, + help="Insert this percentage of additional random tokens.") + group.add("--random_ratio", "-random_ratio", type=float, default=0.0, + help="Instead of using {}, use random token " + "this often.".format(DefaultTokens.MASK)) + + group.add("--mask_ratio", "-mask_ratio", type=float, default=0.0, + help="Fraction of words/subwords that will be masked.") + group.add("--mask_length", "-mask_length", type=str, default="subword", + choices=["subword", "word", "span-poisson"], + help="Length of masking window to apply.") + group.add("--poisson_lambda", "-poisson_lambda", + type=float, default=0.0, + help="Lambda for Poisson distribution to sample span length " + "if `-mask_length` set to span-poisson.") + group.add("--replace_length", "-replace_length", + type=int, default=-1, choices=[-1, 0, 1], + help="When masking N tokens, replace with 0, 1, " + "or N tokens. (use -1 for N)") + + def warm_up(self, vocabs): + self.vocab = vocabs + + subword_type = self.opts.src_subword_type + if self.opts.mask_length == 'subword': + if subword_type == 'none': + raise ValueError( + f'src_subword_type={subword_type} incompatible with ' + f'mask_length={self.opts.mask_length}!') + is_joiner = (subword_type == 'bpe') if subword_type != 'none' else None + self.bart_noise = BARTNoising( + vocabs, + mask_tok=DefaultTokens.MASK, + mask_ratio=self.opts.mask_ratio, + insert_ratio=self.opts.insert_ratio, + permute_sent_ratio=self.opts.permute_sent_ratio, + poisson_lambda=self.opts.poisson_lambda, + replace_length=self.opts.replace_length, + rotate_ratio=self.opts.rotate_ratio, + mask_length=self.opts.mask_length, + random_ratio=self.opts.random_ratio, + is_joiner=is_joiner + ) + + def apply(self, example, is_train=False, stats=None, **kwargs): + """Apply BART noise to src side tokens.""" + if is_train and self.vocab is not None: + src = self.bart_noise.apply(example['src']) + example['src'] = src + return example + + def _repr_args(self): + """Return str represent key arguments for BART.""" + return repr(self.bart_noise) diff --git a/onmt/transforms/misc.py b/onmt/transforms/misc.py new file mode 100644 index 0000000000..01a7ef7945 --- /dev/null +++ b/onmt/transforms/misc.py @@ -0,0 +1,104 @@ +from onmt.utils.logging import logger +from onmt.transforms import register_transform +from .transform import Transform + + +@register_transform(name='filtertoolong') +class FilterTooLongTransform(Transform): + """Filter out sentence that are too long.""" + + def __init__(self, opts): + super().__init__(opts) + self.src_seq_length = opts.src_seq_length + self.tgt_seq_length = opts.tgt_seq_length + + @classmethod + def add_options(cls, parser): + """Avalilable options relate to this Transform.""" + group = parser.add_argument_group("Transform/Filter") + group.add("--src_seq_length", "-src_seq_length", type=int, default=200, + help="Maximum source sequence length.") + group.add("--tgt_seq_length", "-tgt_seq_length", type=int, default=200, + help="Maximum target sequence length.") + + def apply(self, example, is_train=False, stats=None, **kwargs): + """Return None if too long else return as is.""" + if (len(example['src']) > self.src_seq_length or + len(example['tgt']) > self.tgt_seq_length): + if stats is not None: + stats.filter_too_long() + return None + else: + return example + + def _repr_args(self): + """Return str represent key arguments for class.""" + return '{}={}, {}={}'.format( + 'src_seq_length', self.src_seq_length, + 'tgt_seq_length', self.tgt_seq_length + ) + + 
+@register_transform(name='prefix')
+class PrefixTransform(Transform):
+    """Add Prefix to src (& tgt) sentence."""
+
+    def __init__(self, opts):
+        super().__init__(opts)
+        self.prefix_dict = self.get_prefix_dict(self.opts)
+
+    @staticmethod
+    def _get_prefix(corpus):
+        """Get prefix string of a `corpus`."""
+        if 'prefix' in corpus['transforms']:
+            prefix = {
+                'src': corpus['src_prefix'],
+                'tgt': corpus['tgt_prefix']
+            }
+        else:
+            prefix = None
+        return prefix
+
+    @classmethod
+    def get_prefix_dict(cls, opts):
+        """Get all prefixes corresponding to the corpora in `opts`."""
+        prefix_dict = {}
+        for c_name, corpus in opts.data.items():
+            prefix = cls._get_prefix(corpus)
+            if prefix is not None:
+                logger.info(f"Get prefix for {c_name}: {prefix}")
+                prefix_dict[c_name] = prefix
+        return prefix_dict
+
+    @classmethod
+    def get_specials(cls, opts):
+        """Get special vocabs added by prefix transform."""
+        prefix_dict = cls.get_prefix_dict(opts)
+        src_specials, tgt_specials = set(), set()
+        for _, prefix in prefix_dict.items():
+            src_specials.update(prefix['src'].split())
+            tgt_specials.update(prefix['tgt'].split())
+        return (src_specials, tgt_specials)
+
+    def _prepend(self, example, prefix):
+        """Prepend `prefix` to each side of `example`."""
+        for side, side_prefix in prefix.items():
+            example[side] = side_prefix.split() + example[side]
+        return example
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply prefix prepend to example.
+
+        Should provide `corpus_name` to get the corresponding prefix.
+        """
+        corpus_name = kwargs.get('corpus_name', None)
+        if corpus_name is None:
+            raise ValueError('corpus_name is required.')
+        corpus_prefix = self.prefix_dict.get(corpus_name, None)
+        if corpus_prefix is None:
+            raise ValueError(f'prefix for {corpus_name} does not exist.')
+        return self._prepend(example, corpus_prefix)
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        return '{}={}'.format('prefix_dict', self.prefix_dict)
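A sketch of the prefix mechanism with an invented corpus entry; in real use `opts.data` is parsed from the YAML configuration:

from argparse import Namespace
from onmt.transforms.misc import PrefixTransform

opts = Namespace(data={'corpus_1': {'transforms': ['prefix'],
                                    'src_prefix': '__to_de__',  # invented tag
                                    'tgt_prefix': ''}})
prefix_transform = PrefixTransform(opts)
example = {'src': ['hello', 'world'], 'tgt': ['hallo', 'welt']}
out = prefix_transform.apply(example, corpus_name='corpus_1')
# out['src'] == ['__to_de__', 'hello', 'world']; tgt is unchanged
# because ''.split() is an empty list.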
diff --git a/onmt/transforms/sampling.py b/onmt/transforms/sampling.py
new file mode 100644
index 0000000000..97d01aaf2d
--- /dev/null
+++ b/onmt/transforms/sampling.py
@@ -0,0 +1,185 @@
+"""Transforms related to Hamming distance sampling."""
+import random
+import numpy as np
+from onmt.utils.logging import logger
+from onmt.constants import DefaultTokens
+from onmt.transforms import register_transform
+from .transform import Transform
+
+
+class HammingDistanceSampling(object):
+    """Functions related to (negative) Hamming Distance Sampling."""
+
+    def _softmax(self, x):
+        softmax = np.exp(x)/sum(np.exp(x))
+        return softmax
+
+    def _sample_replace(self, vocab, reject):
+        """Sample a token from `vocab` other than `reject`."""
+        token = reject
+        while token == reject:
+            token = random.choice(vocab)
+        return token
+
+    def _sample_distance(self, tokens, temperature):
+        """Sample number of tokens to corrupt from `tokens`."""
+        n_tokens = len(tokens)
+        indices = np.arange(n_tokens)
+        logits = indices * -1 * temperature
+        probs = self._softmax(logits)
+        distance = np.random.choice(indices, p=probs)
+        return distance
+
+    def _sample_position(self, tokens, distance):
+        n_tokens = len(tokens)
+        chosen_indices = random.sample(range(n_tokens), k=distance)
+        return chosen_indices
+
+
+@register_transform(name='switchout')
+class SwitchOutTransform(Transform, HammingDistanceSampling):
+    """
+    SwitchOut.
+    :cite:`DBLP:journals/corr/abs-1808-07512`
+    """
+
+    def __init__(self, opts):
+        super().__init__(opts)
+
+    def warm_up(self, vocabs):
+        self.vocab = vocabs
+        if vocabs is None:
+            logger.warning(
+                "SwitchOut disabled as no vocab; shouldn't happen in training!")
+        self.temperature = self.opts.switchout_temperature
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options relating to this Transform."""
+        group = parser.add_argument_group("Transform/SwitchOut")
+        group.add("-switchout_temperature", "--switchout_temperature",
+                  type=float, default=1.0,
+                  help="Sampling temperature for SwitchOut. tau^(-1) "
+                       "in :cite:`DBLP:journals/corr/abs-1808-07512`. "
+                       "Smaller value makes data more diverse.")
+
+    def _switchout(self, tokens, vocab, stats=None):
+        assert vocab is not None, "vocab cannot be None for SwitchOut."
+        # 1. sample number of tokens to corrupt
+        n_chosen = self._sample_distance(tokens, self.temperature)
+        # 2. sample positions to corrupt
+        chosen_indices = self._sample_position(tokens, distance=n_chosen)
+        # 3. sample corrupted values
+        out = []
+        for (i, tok) in enumerate(tokens):
+            if i in chosen_indices:
+                tok = self._sample_replace(vocab, reject=tok)
+                out.append(tok)
+            else:
+                out.append(tok)
+        if stats is not None:
+            stats.switchout(n_switchout=n_chosen, n_total=len(tokens))
+        return out
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply switchout to both src and tgt side tokens."""
+        if is_train and self.vocab is not None:
+            src = self._switchout(example['src'], self.vocab['src'], stats)
+            tgt = self._switchout(example['tgt'], self.vocab['tgt'], stats)
+            example['src'], example['tgt'] = src, tgt
+        return example
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        return '{}={}'.format('switchout_temperature', self.temperature)
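All three samplers in this file share `_sample_distance`, which draws the number of corrupted tokens d with P(d = i) proportional to exp(-i * temperature). A standalone sketch of those probabilities (numpy only):

import numpy as np

def distance_probs(n_tokens, temperature):
    logits = np.arange(n_tokens) * -temperature
    exp = np.exp(logits)
    return exp / exp.sum()

print(distance_probs(5, 1.0))  # ~[0.64 0.23 0.09 0.03 0.01]
print(distance_probs(5, 0.1))  # ~[0.24 0.22 0.20 0.18 0.16]

Higher temperature thus concentrates mass on small corruption counts, which matches the help string "smaller value makes data more diverse".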
+
+
+@register_transform(name='tokendrop')
+class TokenDropTransform(Transform, HammingDistanceSampling):
+    """Randomly drop tokens from a sentence."""
+
+    def __init__(self, opts):
+        super().__init__(opts)
+        self.temperature = self.opts.tokendrop_temperature
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options relating to this Transform."""
+        group = parser.add_argument_group("Transform/Token_Drop")
+        group.add("-tokendrop_temperature", "--tokendrop_temperature",
+                  type=float, default=1.0,
+                  help="Sampling temperature for token deletion.")
+
+    def _token_drop(self, tokens, stats=None):
+        # 1. sample number of tokens to corrupt
+        n_chosen = self._sample_distance(tokens, self.temperature)
+        # 2. sample positions to corrupt
+        chosen_indices = self._sample_position(tokens, distance=n_chosen)
+        # 3. drop tokens at the chosen positions
+        out = [tok for (i, tok) in enumerate(tokens)
+               if i not in chosen_indices]
+        if stats is not None:
+            stats.token_drop(n_dropped=n_chosen, n_total=len(tokens))
+        return out
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply token drop to both src and tgt side tokens."""
+        if is_train:
+            src = self._token_drop(example['src'], stats)
+            tgt = self._token_drop(example['tgt'], stats)
+            example['src'], example['tgt'] = src, tgt
+        return example
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        return '{}={}'.format('tokendrop_temperature', self.temperature)
+
+
+@register_transform(name='tokenmask')
+class TokenMaskTransform(Transform, HammingDistanceSampling):
+    """Randomly mask tokens in a sentence."""
+
+    MASK_TOK = DefaultTokens.MASK
+
+    def __init__(self, opts):
+        super().__init__(opts)
+        self.temperature = opts.tokenmask_temperature
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options relating to this Transform."""
+        group = parser.add_argument_group("Transform/Token_Mask")
+        group.add('-tokenmask_temperature', '--tokenmask_temperature',
+                  type=float, default=1.0,
+                  help="Sampling temperature for token masking.")
+
+    @classmethod
+    def get_specials(cls, opts):
+        """Get special tokens added by this transform."""
+        return ({cls.MASK_TOK}, set())
+
+    def _token_mask(self, tokens, stats=None):
+        # 1. sample number of tokens to corrupt
+        n_chosen = self._sample_distance(tokens, self.temperature)
+        # 2. sample positions to corrupt
+        chosen_indices = self._sample_position(tokens, distance=n_chosen)
+        # 3. mask tokens at the chosen positions
+        out = []
+        for (i, tok) in enumerate(tokens):
+            tok = self.MASK_TOK if i in chosen_indices else tok
+            out.append(tok)
+        if stats is not None:
+            stats.token_mask(n_masked=n_chosen, n_total=len(tokens))
+        return out
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply token mask to both src and tgt side tokens."""
+        if is_train:
+            src = self._token_mask(example['src'], stats)
+            tgt = self._token_mask(example['tgt'], stats)
+            example['src'], example['tgt'] = src, tgt
+        return example
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        return '{}={}'.format('tokenmask_temperature', self.temperature)
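A sketch of exercising one of these samplers directly; the options namespace and toy vocab are invented, and during training `warm_up` receives the real fields vocabularies:

from argparse import Namespace
from onmt.transforms.sampling import SwitchOutTransform

switchout = SwitchOutTransform(Namespace(switchout_temperature=1.0))
switchout.warm_up({'src': ['a', 'b', 'c'], 'tgt': ['x', 'y', 'z']})
example = {'src': ['a', 'b', 'a'], 'tgt': ['x', 'y']}
print(switchout.apply(example, is_train=True))
# e.g. {'src': ['a', 'c', 'a'], 'tgt': ['x', 'y']} -- corruption is random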
diff --git a/onmt/transforms/tokenize.py b/onmt/transforms/tokenize.py
new file mode 100644
index 0000000000..feb337f84a
--- /dev/null
+++ b/onmt/transforms/tokenize.py
@@ -0,0 +1,359 @@
+"""Transforms related to tokenization/subword."""
+from onmt.utils.logging import logger
+from onmt.transforms import register_transform
+from .transform import Transform
+
+
+class TokenizerTransform(Transform):
+    """Tokenizer transform abstract class."""
+
+    def __init__(self, opts):
+        """Initialize necessary options for Tokenizer."""
+        super().__init__(opts)
+        self._parse_opts()
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options relating to Subword."""
+        # Options are shared among all `TokenizerTransform`s; a same-name
+        # conflict in this scope is resolved by removing the previous
+        # occurrence in the parser
+        group = parser.add_argument_group(
+            'Transform/Subword/Common', conflict_handler='resolve')
+        group.add('-src_subword_model', '--src_subword_model',
+                  help="Path of subword model for src (or shared).")
+        group.add("-tgt_subword_model", "--tgt_subword_model",
+                  help="Path of subword model for tgt.")
+
+        # subword regularization (or BPE dropout) options:
+        group.add('-src_subword_nbest', '--src_subword_nbest',
+                  type=int, default=1,
+                  help="Number of candidates in subword regularization. "
+                       "Valid for unigram sampling, "
+                       "invalid for BPE-dropout. "
+                       "(source side)")
+        group.add('-tgt_subword_nbest', '--tgt_subword_nbest',
+                  type=int, default=1,
+                  help="Number of candidates in subword regularization. "
+                       "Valid for unigram sampling, "
+                       "invalid for BPE-dropout. "
+                       "(target side)")
+        group.add('-src_subword_alpha', '--src_subword_alpha',
+                  type=float, default=0,
+                  help="Smoothing parameter for sentencepiece unigram "
+                       "sampling, and dropout probability for BPE-dropout. "
+                       "(source side)")
+        group.add('-tgt_subword_alpha', '--tgt_subword_alpha',
+                  type=float, default=0,
+                  help="Smoothing parameter for sentencepiece unigram "
+                       "sampling, and dropout probability for BPE-dropout. "
+                       "(target side)")
+
+    @classmethod
+    def _validate_options(cls, opts):
+        """Extra checks for Subword options."""
+        assert 0 <= opts.src_subword_alpha <= 1, \
+            "src_subword_alpha should be in the range [0, 1]"
+        assert 0 <= opts.tgt_subword_alpha <= 1, \
+            "tgt_subword_alpha should be in the range [0, 1]"
+
+    def _parse_opts(self):
+        raise NotImplementedError
+
+    def _set_subword_opts(self):
+        """Set necessary options related to subword."""
+        self.share_vocab = self.opts.share_vocab
+        self.src_subword_model = self.opts.src_subword_model
+        self.tgt_subword_model = self.opts.tgt_subword_model
+        self.src_subword_nbest = self.opts.src_subword_nbest
+        self.tgt_subword_nbest = self.opts.tgt_subword_nbest
+        self.src_subword_alpha = self.opts.src_subword_alpha
+        self.tgt_subword_alpha = self.opts.tgt_subword_alpha
+
+    def __getstate__(self):
+        """Pickle only `opts`; subword models are rebuilt on load."""
+        return self.opts
+
+    def __setstate__(self, opts):
+        """Reload when unpickling from save file."""
+        self.opts = opts
+        self._parse_opts()
+        self.warm_up()
+
+    def _repr_args(self):
+        """Return str representing key arguments for TokenizerTransform."""
+        kwargs = {
+            'share_vocab': self.share_vocab,
+            'src_subword_model': self.src_subword_model,
+            'tgt_subword_model': self.tgt_subword_model,
+            'src_subword_alpha': self.src_subword_alpha,
+            'tgt_subword_alpha': self.tgt_subword_alpha
+        }
+        return ', '.join([f'{kw}={arg}' for kw, arg in kwargs.items()])
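For context, the sentencepiece calls that `SentencePieceTransform` below wraps, sketched standalone; the model path is invented, and the keyword arguments follow the sentencepiece Python API:

import sentencepiece as spm

sp = spm.SentencePieceProcessor()
sp.Load('subword.model')  # invented example path
# deterministic segmentation (inference, or nbest_size in {0, 1}):
pieces = sp.encode('hello world', out_type=str)
# subword regularization (training, nbest_size > 1 or -1):
sampled = sp.encode('hello world', out_type=str,
                    enable_sampling=True, alpha=0.1, nbest_size=-1)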
+
+
+@register_transform(name='sentencepiece')
+class SentencePieceTransform(TokenizerTransform):
+    """SentencePiece subword transform class."""
+
+    def __init__(self, opts):
+        """Initialize necessary options for sentencepiece."""
+        super().__init__(opts)
+        self._parse_opts()
+
+    def _parse_opts(self):
+        self._set_subword_opts()
+
+    def warm_up(self, vocabs=None):
+        """Load subword models."""
+        import sentencepiece as spm
+        load_src_model = spm.SentencePieceProcessor()
+        load_src_model.Load(self.src_subword_model)
+        if self.share_vocab:
+            self.load_models = {
+                'src': load_src_model,
+                'tgt': load_src_model
+            }
+        else:
+            load_tgt_model = spm.SentencePieceProcessor()
+            load_tgt_model.Load(self.tgt_subword_model)
+            self.load_models = {
+                'src': load_src_model,
+                'tgt': load_tgt_model
+            }
+
+    def _tokenize(self, tokens, side='src', is_train=False):
+        """Do sentencepiece subword tokenization."""
+        sp_model = self.load_models[side]
+        sentence = ' '.join(tokens)
+        nbest_size = self.tgt_subword_nbest if side == 'tgt' else \
+            self.src_subword_nbest
+        alpha = self.tgt_subword_alpha if side == 'tgt' else \
+            self.src_subword_alpha
+        if is_train is False or nbest_size in [0, 1]:
+            # deterministic subwording
+            segmented = sp_model.encode(sentence, out_type=str)
+        else:
+            # subword sampling when nbest_size > 1 or -1
+            # alpha should be 0.0 < alpha < 1.0
+            segmented = sp_model.encode(
+                sentence, out_type=str, enable_sampling=True,
+                alpha=alpha, nbest_size=nbest_size)
+        return segmented
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply sentencepiece subword encode to src & tgt."""
+        src_out = self._tokenize(example['src'], 'src', is_train)
+        tgt_out = self._tokenize(example['tgt'], 'tgt', is_train)
+        if stats is not None:
+            n_words = len(example['src']) + len(example['tgt'])
+            n_subwords = len(src_out) + len(tgt_out)
+            stats.subword(n_subwords, n_words)
+        example['src'], example['tgt'] = src_out, tgt_out
+        return example
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        kwargs_str = super()._repr_args()
+        additional_str = 'src_subword_nbest={}, tgt_subword_nbest={}'.format(
+            self.src_subword_nbest, self.tgt_subword_nbest
+        )
+        return kwargs_str + ', ' + additional_str
+
+
+@register_transform(name='bpe')
+class BPETransform(TokenizerTransform):
+    def __init__(self, opts):
+        """Initialize necessary options for subword_nmt."""
+        super().__init__(opts)
+        self._parse_opts()
+
+    def _parse_opts(self):
+        self._set_subword_opts()
+        self.dropout = {'src': self.src_subword_alpha,
+                        'tgt': self.tgt_subword_alpha}
+
+    def warm_up(self, vocabs=None):
+        """Load subword models."""
+        from subword_nmt.apply_bpe import BPE
+        import codecs
+        src_codes = codecs.open(self.src_subword_model, encoding='utf-8')
+        load_src_model = BPE(codes=src_codes)
+        if self.share_vocab:
+            self.load_models = {
+                'src': load_src_model,
+                'tgt': load_src_model
+            }
+        else:
+            tgt_codes = codecs.open(self.tgt_subword_model, encoding='utf-8')
+            load_tgt_model = BPE(codes=tgt_codes)
+            self.load_models = {
+                'src': load_src_model,
+                'tgt': load_tgt_model
+            }
+
+    def _tokenize(self, tokens, side='src', is_train=False):
+        """Do bpe subword tokenization."""
+        bpe_model = self.load_models[side]
+        dropout = self.dropout[side] if is_train else 0
+        segmented = bpe_model.segment_tokens(tokens, dropout=dropout)
+        return segmented
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply bpe subword encode to src & tgt."""
+        src_out = self._tokenize(example['src'], 'src', is_train)
+        tgt_out = self._tokenize(example['tgt'], 'tgt', is_train)
+        if stats is not None:
+            n_words = len(example['src']) + len(example['tgt'])
+            n_subwords = len(src_out) + len(tgt_out)
+            stats.subword(n_subwords, n_words)
+        example['src'], example['tgt'] = src_out, tgt_out
+        return example
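Likewise, a sketch of the subword_nmt calls wrapped by `BPETransform` above; the codes path is invented, and a `dropout` > 0 enables BPE-dropout, which is only meaningful at training time:

import codecs
from subword_nmt.apply_bpe import BPE

bpe = BPE(codes=codecs.open('bpe.codes', encoding='utf-8'))  # invented path
tokens = ['hello', 'world']
print(bpe.segment_tokens(tokens))               # deterministic merges
print(bpe.segment_tokens(tokens, dropout=0.1))  # BPE-dropout sampling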
+
+
+@register_transform(name='onmt_tokenize')
+class ONMTTokenizerTransform(TokenizerTransform):
+    """OpenNMT Tokenizer transform class."""
+
+    def __init__(self, opts):
+        """Initialize necessary options for OpenNMT Tokenizer."""
+        super().__init__(opts)
+        self._parse_opts()
+
+    @classmethod
+    def add_options(cls, parser):
+        """Available options relating to Subword."""
+        super().add_options(parser)
+        group = parser.add_argument_group('Transform/Subword/ONMTTOK')
+        group.add('-src_subword_type', '--src_subword_type',
+                  type=str, default='none',
+                  choices=['none', 'sentencepiece', 'bpe'],
+                  help="Type of subword model for src (or shared) "
+                       "in pyonmttok.")
+        group.add('-tgt_subword_type', '--tgt_subword_type',
+                  type=str, default='none',
+                  choices=['none', 'sentencepiece', 'bpe'],
+                  help="Type of subword model for tgt in pyonmttok.")
+        group.add('-src_onmttok_kwargs', '--src_onmttok_kwargs', type=str,
+                  default="{'mode': 'none'}",
+                  help="Other pyonmttok options for src in dict string, "
+                       "except subword related options listed earlier.")
+        group.add('-tgt_onmttok_kwargs', '--tgt_onmttok_kwargs', type=str,
+                  default="{'mode': 'none'}",
+                  help="Other pyonmttok options for tgt in dict string, "
+                       "except subword related options listed earlier.")
+
+    @classmethod
+    def _validate_options(cls, opts):
+        """Extra checks for OpenNMT Tokenizer options."""
+        super()._validate_options(opts)
+        src_kwargs_dict = eval(opts.src_onmttok_kwargs)
+        tgt_kwargs_dict = eval(opts.tgt_onmttok_kwargs)
+        if not isinstance(src_kwargs_dict, dict):
+            raise ValueError("-src_onmttok_kwargs isn't a valid dict string.")
+        if not isinstance(tgt_kwargs_dict, dict):
+            raise ValueError("-tgt_onmttok_kwargs isn't a valid dict string.")
+        opts.src_onmttok_kwargs = src_kwargs_dict
+        opts.tgt_onmttok_kwargs = tgt_kwargs_dict
+
+    def _set_subword_opts(self):
+        """Set all options related to subword for OpenNMT/Tokenizer."""
+        super()._set_subword_opts()
+        self.src_subword_type = self.opts.src_subword_type
+        self.tgt_subword_type = self.opts.tgt_subword_type
+
+    def _parse_opts(self):
+        self._set_subword_opts()
+        logger.info("Parsed pyonmttok kwargs for src: {}".format(
+            self.opts.src_onmttok_kwargs))
+        logger.info("Parsed pyonmttok kwargs for tgt: {}".format(
+            self.opts.tgt_onmttok_kwargs))
+        self.src_other_kwargs = self.opts.src_onmttok_kwargs
+        self.tgt_other_kwargs = self.opts.tgt_onmttok_kwargs
+
+    @classmethod
+    def get_specials(cls, opts):
+        src_specials, tgt_specials = set(), set()
+        if opts.src_onmttok_kwargs.get("case_markup", False):
+            _case_specials = ['⦅mrk_case_modifier_C⦆',
+                              '⦅mrk_begin_case_region_U⦆',
+                              '⦅mrk_end_case_region_U⦆']
+            src_specials.update(_case_specials)
+        if opts.tgt_onmttok_kwargs.get("case_markup", False):
+            _case_specials = ['⦅mrk_case_modifier_C⦆',
+                              '⦅mrk_begin_case_region_U⦆',
+                              '⦅mrk_end_case_region_U⦆']
+            tgt_specials.update(_case_specials)
+        return (src_specials, tgt_specials)
+
+    def _get_subword_kwargs(self, side='src'):
+        """Return a dict containing kwargs related to `side` subwords."""
+        subword_type = self.tgt_subword_type if side == 'tgt' \
+            else self.src_subword_type
+        subword_model = self.tgt_subword_model if side == 'tgt' \
+            else self.src_subword_model
+        subword_nbest = self.tgt_subword_nbest if side == 'tgt' \
+            else self.src_subword_nbest
+        subword_alpha = self.tgt_subword_alpha if side == 'tgt' \
+            else self.src_subword_alpha
+        kwopts = dict()
+        if subword_type == 'bpe':
+            kwopts['bpe_model_path'] = subword_model
+            kwopts['bpe_dropout'] = subword_alpha
+        elif subword_type == 'sentencepiece':
+            kwopts['sp_model_path'] = subword_model
+            kwopts['sp_nbest_size'] = subword_nbest
+            kwopts['sp_alpha'] = subword_alpha
+        else:
+            logger.warning('No subword method will be applied.')
+        return kwopts
+
+    def warm_up(self, vocab=None):
+        """Initialize Tokenizer models."""
+        import pyonmttok
+        src_subword_kwargs = self._get_subword_kwargs(side='src')
+        src_tokenizer = pyonmttok.Tokenizer(
+            **src_subword_kwargs, **self.src_other_kwargs
+        )
+        if self.share_vocab:
+            self.load_models = {
+                'src': src_tokenizer,
+                'tgt': src_tokenizer
+            }
+        else:
+            tgt_subword_kwargs = self._get_subword_kwargs(side='tgt')
+            tgt_tokenizer = pyonmttok.Tokenizer(
+                **tgt_subword_kwargs, **self.tgt_other_kwargs
+            )
+            self.load_models = {
+                'src': src_tokenizer,
+                'tgt': tgt_tokenizer
+            }
+
+    def _tokenize(self, tokens, side='src', is_train=False):
+        """Do OpenNMT Tokenizer's tokenize."""
+        tokenizer = self.load_models[side]
+        sentence = ' '.join(tokens)
+        segmented, _ = tokenizer.tokenize(sentence)
+        return segmented
+
+    def apply(self, example, is_train=False, stats=None, **kwargs):
+        """Apply OpenNMT Tokenizer to src & tgt."""
+        src_out = self._tokenize(example['src'], 'src')
+        tgt_out = self._tokenize(example['tgt'], 'tgt')
+        if stats is not None:
+            n_words = len(example['src']) + len(example['tgt'])
+            n_subwords = len(src_out) + len(tgt_out)
+            stats.subword(n_subwords, n_words)
+        example['src'], example['tgt'] = src_out, tgt_out
+        return example
+
+    def _repr_args(self):
+        """Return str representing key arguments for class."""
+        repr_str = '{}={}'.format('share_vocab', self.share_vocab)
+        repr_str += ', src_subword_kwargs={}'.format(
+            self._get_subword_kwargs(side='src'))
+        repr_str += ', src_onmttok_kwargs={}'.format(self.src_other_kwargs)
+        repr_str += ', tgt_subword_kwargs={}'.format(
+            self._get_subword_kwargs(side='tgt'))
+        repr_str += ', tgt_onmttok_kwargs={}'.format(self.tgt_other_kwargs)
+        return repr_str
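And the pyonmttok calls behind `ONMTTokenizerTransform`, sketched standalone; the mode and model path are invented, and `bpe_model_path`/`joiner_annotate` mirror the kwargs assembled in `_get_subword_kwargs` and the `*_onmttok_kwargs` option:

import pyonmttok

tokenizer = pyonmttok.Tokenizer('aggressive', joiner_annotate=True,
                                bpe_model_path='subword.bpe')  # invented path
tokens, _ = tokenizer.tokenize('Hello world!')
print(tokens)
print(tokenizer.detokenize(tokens))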
+ """ + raise NotImplementedError + + def stats(self): + """Return statistic message.""" + return '' + + def _repr_args(self): + """Return str represent key arguments for class.""" + return '' + + def __repr__(self): + cls_name = type(self).__name__ + cls_args = self._repr_args() + return '{}({})'.format(cls_name, cls_args) + + +class TransformStatistics(object): + """Return a statistic counter for Transform.""" + + def __init__(self): + """Initialize statistic counter.""" + self.reset() + + def reset(self): + """Statistic counters for all transforms.""" + self.filtered = 0 + self.words, self.subwords = 0, 0 + self.n_switchouted, self.so_total = 0, 0 + self.n_dropped, self.td_total = 0, 0 + self.n_masked, self.tm_total = 0, 0 + + def filter_too_long(self): + """Update filtered sentence counter.""" + self.filtered += 1 + + def subword(self, subwords, words): + """Update subword counter.""" + self.words += words + self.subwords += subwords + + def switchout(self, n_switchout, n_total): + """Update switchout counter.""" + self.n_switchouted += n_switchout + self.so_total += n_total + + def token_drop(self, n_dropped, n_total): + """Update token drop counter.""" + self.n_dropped += n_dropped + self.td_total += n_total + + def token_mask(self, n_masked, n_total): + """Update token mask counter.""" + self.n_masked += n_masked + self.tm_total += n_total + + def report(self): + """Return transform statistics report and reset counter.""" + msg = '' + if self.filtered > 0: + msg += f'Filtred sentence: {self.filtered} sent\n'.format() + if self.words > 0: + msg += f'Subword(SP/Tokenizer): {self.words} -> {self.subwords} tok\n' # noqa: E501 + if self.so_total > 0: + msg += f'SwitchOut: {self.n_switchouted}/{self.so_total} tok\n' + if self.td_total > 0: + msg += f'Token dropped: {self.n_dropped}/{self.td_total} tok\n' + if self.tm_total > 0: + msg += f'Token masked: {self.n_masked}/{self.tm_total} tok\n' + self.reset() + return msg + + +class TransformPipe(Transform): + """Pipeline built by a list of Transform instance.""" + + def __init__(self, opts, transform_list): + """Initialize pipeline by a list of transform instance.""" + self.opts = None # opts is not required + self.transforms = transform_list + self.statistics = TransformStatistics() + + @classmethod + def build_from(cls, transform_list): + """Return a `TransformPipe` instance build from `transform_list`.""" + for transform in transform_list: + assert isinstance(transform, Transform), \ + "transform should be a instance of Transform." + transform_pipe = cls(None, transform_list) + return transform_pipe + + def warm_up(self, vocabs): + """Warm up Pipeline by iterate over all transfroms.""" + for transform in self.transforms: + transform.warm_up(vocabs) + + @classmethod + def get_specials(cls, opts, transforms): + """Return all specials introduced by `transforms`.""" + src_specials, tgt_specials = set(), set() + for transform in transforms: + _src_special, _tgt_special = transform.get_specials(transform.opts) + src_specials.update(_src_special) + tgt_specials.update(tgt_specials) + return (src_specials, tgt_specials) + + def apply(self, example, is_train=False, **kwargs): + """Apply transform pipe to `example`. + + Args: + example (dict): a dict of field value, ex. src, tgt. 
+ + """ + for transform in self.transforms: + example = transform.apply( + example, is_train=is_train, stats=self.statistics, **kwargs) + if example is None: + break + return example + + def stats(self): + """Return statistic message.""" + return self.statistics.report() + + def _repr_args(self): + """Return str represent key arguments for class.""" + info_args = [] + for transform in self.transforms: + info_args.append(repr(transform)) + return ', '.join(info_args) + + +def make_transforms(opts, transforms_cls, fields): + """Build transforms in `transforms_cls` with vocab of `fields`.""" + vocabs = get_vocabs(fields) if fields is not None else None + transforms = {} + for name, transform_cls in transforms_cls.items(): + transform_obj = transform_cls(opts) + transform_obj.warm_up(vocabs) + transforms[name] = transform_obj + return transforms + + +def get_specials(opts, transforms_cls_dict): + """Get specials of transforms that should be registed in Vocab.""" + all_specials = {'src': set(), 'tgt': set()} + for name, transform_cls in transforms_cls_dict.items(): + src_specials, tgt_specials = transform_cls.get_specials(opts) + all_specials['src'].update(src_specials) + all_specials['tgt'].update(tgt_specials) + logger.info(f"Get special vocabs from Transforms: {all_specials}.") + return all_specials + + +def save_transforms(transforms, save_data, overwrite=True): + """Dump `transforms` object.""" + transforms_path = "{}.transforms.pt".format(save_data) + os.makedirs(os.path.dirname(transforms_path), exist_ok=True) + check_path(transforms_path, exist_ok=overwrite, log=logger.warning) + logger.info(f"Saving Transforms to {transforms_path}.") + torch.save(transforms, transforms_path) + + +def load_transforms(opts): + """Load dumped `transforms` object.""" + transforms_path = "{}.transforms.pt".format(opts.save_data) + transforms = torch.load(transforms_path) + logger.info("Transforms loaded.") + return transforms diff --git a/onmt/translate/translation.py b/onmt/translate/translation.py index 1de69a6346..40d8df1a28 100644 --- a/onmt/translate/translation.py +++ b/onmt/translate/translation.py @@ -3,6 +3,7 @@ import os import torch +from onmt.constants import DefaultTokens from onmt.inputters.text_dataset import TextMultiField from onmt.utils.alignment import build_align_pharaoh @@ -35,7 +36,8 @@ def __init__(self, data, fields, n_best=1, replace_unk=False, if phrase_table != "" and os.path.exists(phrase_table): with open(phrase_table) as phrase_table_fd: for line in phrase_table_fd: - phrase_src, phrase_trg = line.rstrip("\n").split("|||") + phrase_src, phrase_trg = line.rstrip("\n").split( + DefaultTokens.PHRASE_TABLE_SEPARATOR) self.phrase_table_dict[phrase_src] = phrase_trg self.has_tgt = has_tgt diff --git a/onmt/translate/translation_server.py b/onmt/translate/translation_server.py index 8c3f4fc44c..928c524798 100644 --- a/onmt/translate/translation_server.py +++ b/onmt/translate/translation_server.py @@ -16,6 +16,7 @@ from itertools import islice, zip_longest from copy import deepcopy +from onmt.constants import DefaultTokens from onmt.utils.logging import init_logger from onmt.utils.misc import set_random_seed from onmt.utils.misc import check_model_config @@ -760,7 +761,7 @@ def maybe_detokenize_with_align(self, sequence, src, side='tgt'): align = None if self.opt.report_align: # output contain alignment - sequence, align = sequence.split(' ||| ') + sequence, align = sequence.split(DefaultTokens.ALIGNMENT_SEPARATOR) if align != '': align = self.maybe_convert_align(src, sequence, align) 
sequence = self.maybe_detokenize(sequence, side) diff --git a/onmt/translate/translator.py b/onmt/translate/translator.py index 10c8928185..521133c3fe 100644 --- a/onmt/translate/translator.py +++ b/onmt/translate/translator.py @@ -9,6 +9,7 @@ import torch +from onmt.constants import DefaultTokens import onmt.model_builder import onmt.inputters as inputters import onmt.decoders.ensemble @@ -231,7 +232,7 @@ def from_opt( report_score (bool) : See :func:`__init__()`. logger (logging.Logger or NoneType): See :func:`__init__()`. """ - + # TODO: maybe add dynamic part src_reader = inputters.str2reader[opt.data_type].from_opt(opt) tgt_reader = inputters.str2reader["text"].from_opt(opt) return cls( @@ -286,7 +287,6 @@ def translate( self, src, tgt=None, - src_dir=None, batch_size=None, batch_type="sents", attn_debug=False, @@ -297,8 +297,6 @@ def translate( Args: src: See :func:`self.src_reader.read()`. tgt: See :func:`self.tgt_reader.read()`. - src_dir: See :func:`self.src_reader.read()` (only relevant - for certain types of data). batch_size (int): size of examples per mini-batch attn_debug (bool): enables the attention logging align_debug (bool): enables the word alignment logging @@ -317,16 +315,13 @@ def translate( if self.tgt_prefix and tgt is None: raise ValueError('Prefix should be feed to tgt if -tgt_prefix.') - src_data = {"reader": self.src_reader, "data": src, "dir": src_dir} - tgt_data = {"reader": self.tgt_reader, "data": tgt, "dir": None} - _readers, _data, _dir = inputters.Dataset.config( + src_data = {"reader": self.src_reader, "data": src} + tgt_data = {"reader": self.tgt_reader, "data": tgt} + _readers, _data = inputters.Dataset.config( [('src', src_data), ('tgt', tgt_data)]) - # corpus_id field is useless here - if self.fields.get("corpus_id", None) is not None: - self.fields.pop('corpus_id') data = inputters.Dataset( - self.fields, readers=_readers, data=_data, dirs=_dir, + self.fields, readers=_readers, data=_data, sort_key=inputters.str2sortkey[self.data_type], filter_pred=self._filter_pred ) @@ -378,9 +373,10 @@ def translate( in trans.word_aligns[:self.n_best]] n_best_preds_align = [" ".join(align) for align in align_pharaohs] - n_best_preds = [pred + " ||| " + align - for pred, align in zip( - n_best_preds, n_best_preds_align)] + n_best_preds = [ + pred + DefaultTokens.ALIGNMENT_SEPARATOR + align + for pred, align in zip( + n_best_preds, n_best_preds_align)] all_predictions += [n_best_preds] self.out_file.write('\n'.join(n_best_preds) + '\n') self.out_file.flush() @@ -395,7 +391,7 @@ def translate( if attn_debug: preds = trans.pred_sents[0] - preds.append('') + preds.append(DefaultTokens.EOS) attns = trans.attns[0].tolist() if self.data_type == 'text': srcs = trans.src_raw diff --git a/onmt/utils/alignment.py b/onmt/utils/alignment.py index f9c187deef..0a70edb33e 100644 --- a/onmt/utils/alignment.py +++ b/onmt/utils/alignment.py @@ -2,6 +2,7 @@ import torch from itertools import accumulate +from onmt.constants import SubwordMarker def make_batch_align_matrix(index_tensor, size=None, normalize=False): @@ -106,11 +107,11 @@ def to_word_align(src, tgt, subword_align, m_src='joiner', m_tgt='joiner'): subword_align = {(int(a), int(b)) for a, b in (x.split("-") for x in subword_align.split())} - src_map = (subword_map_by_spacer(src, marker='▁') if m_src == 'spacer' - else subword_map_by_joiner(src, marker='■')) + src_map = (subword_map_by_spacer(src) if m_src == 'spacer' + else subword_map_by_joiner(src)) - tgt_map = (subword_map_by_spacer(src, marker='▁') if m_tgt == 'spacer' 
-               else subword_map_by_joiner(src, marker='■'))
+    tgt_map = (subword_map_by_spacer(tgt) if m_tgt == 'spacer'
+               else subword_map_by_joiner(tgt))
 
     word_align = list({"{}-{}".format(src_map[a], tgt_map[b])
                        for a, b in subword_align})
@@ -119,7 +120,7 @@ def to_word_align(src, tgt, subword_align, m_src='joiner', m_tgt='joiner'):
     return " ".join(word_align)
 
 
-def subword_map_by_joiner(subwords, marker='■'):
+def subword_map_by_joiner(subwords, marker=SubwordMarker.JOINER):
     """Return word id for each subword token (annotate by joiner)."""
     flags = [0] * len(subwords)
     for i, tok in enumerate(subwords):
@@ -135,7 +136,7 @@
     return word_group
 
 
-def subword_map_by_spacer(subwords, marker='▁'):
+def subword_map_by_spacer(subwords, marker=SubwordMarker.SPACER):
     """Return word id for each subword token (annotate by spacer)."""
     word_group = list(accumulate([int(marker in x) for x in subwords]))
     if word_group[0] == 1:  # when dummy prefix is set
diff --git a/onmt/utils/distributed.py b/onmt/utils/distributed.py
index 99d76d1352..e3ddf8b0d8 100644
--- a/onmt/utils/distributed.py
+++ b/onmt/utils/distributed.py
@@ -6,11 +6,15 @@
 
 from __future__ import print_function
 
+import os
+import signal
 import math
 import pickle
+
 import torch.distributed
 
-from onmt.utils.logging import logger
+from onmt.utils.misc import set_random_seed
+from onmt.utils.logging import init_logger, logger
 
 
 def is_master(opt, device_id):
@@ -120,3 +124,92 @@ def all_gather_list(data, max_size=4096):
         result = pickle.loads(bytes_list)
         results.append(result)
     return results
+
+
+class ErrorHandler(object):
+    """A class that listens for exceptions in child processes and propagates
+    the tracebacks to the parent process."""
+
+    def __init__(self, error_queue):
+        """Initialize the error handler and start the listener thread."""
+        import threading
+        self.error_queue = error_queue
+        self.children_pids = []
+        self.error_thread = threading.Thread(
+            target=self.error_listener, daemon=True)
+        self.error_thread.start()
+        signal.signal(signal.SIGUSR1, self.signal_handler)
+
+    def add_child(self, pid):
+        """Register a child process `pid` to monitor."""
+        self.children_pids.append(pid)
+
+    def error_listener(self):
+        """Wait for an error from a child, then signal the parent."""
+        (rank, original_trace) = self.error_queue.get()
+        self.error_queue.put((rank, original_trace))
+        os.kill(os.getpid(), signal.SIGUSR1)
+
+    def signal_handler(self, signalnum, stackframe):
+        """Kill child processes and re-raise the propagated traceback."""
+        for pid in self.children_pids:
+            os.kill(pid, signal.SIGINT)  # kill children processes
+        (rank, original_trace) = self.error_queue.get()
+        msg = """\n\n-- Tracebacks above this line can probably
+                 be ignored --\n\n"""
+        msg += original_trace
+        raise Exception(msg)
+
+
+def batch_producer(generator_to_serve, queue, semaphore, opt):
+    """Produce batches to `queue` from `generator_to_serve`."""
+    init_logger(opt.log_file)
+    set_random_seed(opt.seed, False)
+
+    def pred(x):
+        """
+        Keep only the batches that belong
+        to gpu_ranks of the current node.
+        """
+        for rank in opt.gpu_ranks:
+            if x[0] % opt.world_size == rank:
+                return True
+
+    generator_to_serve = filter(
+        pred, enumerate(generator_to_serve))
+
+    def next_batch():
+        # NOTE: stride (if needed) is handled at the
+        # generator (train_iter) level
+        new_batch = next(generator_to_serve)
+        semaphore.acquire()
+        return new_batch[1]
+
+    b = next_batch()
+
+    while True:
+        b.dataset = None
+        # Move batch to the corresponding device_id when the consumer iterates
+
+        # hack to dodge unpicklable `dict_keys`
+        b.fields = list(b.fields)
+        queue.put(b)
+        b = next_batch()
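+
+
+# How the two halves fit together: `batch_producer` runs in its own process,
+# keeps only the batches meant for this node's gpu_ranks, and blocks on
+# `semaphore` so the queue stays bounded; each `consumer` below feeds one
+# GPU from its `batch_queue`.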
+
+
+def consumer(process_fn, opt, device_id, error_queue, batch_queue, semaphore):  # noqa: E501
+    """Run `process_fn` on `device_id` with data from `batch_queue`."""
+    try:
+        gpu_rank = multi_init(opt, device_id)
+        if gpu_rank != opt.gpu_ranks[device_id]:
+            raise AssertionError("An error occurred in "
+                                 "Distributed initialization")
+        process_fn(opt, device_id=device_id,
+                   batch_queue=batch_queue, semaphore=semaphore)
+    except KeyboardInterrupt:
+        pass  # killed by parent, do nothing
+    except Exception:
+        # propagate exception to parent process, keeping original traceback
+        import traceback
+        error_queue.put((opt.gpu_ranks[device_id], traceback.format_exc()))
diff --git a/onmt/utils/misc.py b/onmt/utils/misc.py
index 7b6cbebacc..5e686eb5a5 100644
--- a/onmt/utils/misc.py
+++ b/onmt/utils/misc.py
@@ -3,10 +3,19 @@
 import torch
 import random
 import inspect
+import numpy as np
 from itertools import islice, repeat
 import os
 
 
+def check_path(path, exist_ok=False, log=print):
+    """Warn and continue if `path` exists and `exist_ok`, else raise."""
+    if os.path.exists(path):
+        if exist_ok:
+            log(f"path {path} exists, may overwrite...")
+        else:
+            raise IOError(f"path {path} exists, stop.")
+
+
 def split_corpus(path, shard_size, default=None):
     """yield a `list` containing `shard_size` line of `path`,
     or repeatly generate `default` if `path` is None.
@@ -93,6 +102,8 @@ def set_random_seed(seed, is_cuda):
         # some cudnn methods can be random even after fixing the seed
         # unless you tell it to be deterministic
         torch.backends.cudnn.deterministic = True
+        # This one is needed for various transforms
+        np.random.seed(seed)
 
     if is_cuda and seed > 0:
         # These ensure same initialization in multi gpu mode
diff --git a/onmt/utils/parse.py b/onmt/utils/parse.py
index 273dae3dba..6e754b4d68 100644
--- a/onmt/utils/parse.py
+++ b/onmt/utils/parse.py
@@ -1,13 +1,138 @@
 import configargparse as cfargparse
 import os
-
 import torch
 
 import onmt.opts as opts
 from onmt.utils.logging import logger
+from onmt.constants import CorpusName
+from onmt.transforms import AVAILABLE_TRANSFORMS
+
+
+class DataOptsCheckerMixin(object):
+    """Checker with methods for validating data-related options."""
+
+    @staticmethod
+    def _validate_file(file_path, info):
+        """Check `file_path` is valid or raise `IOError`."""
+        if not os.path.isfile(file_path):
+            raise IOError(f"Please check path of your {info} file!")
+
+    @classmethod
+    def _validate_data(cls, opt):
+        """Parse corpora specified in data field of YAML file."""
+        import yaml
+        default_transforms = opt.transforms
+        if len(default_transforms) != 0:
+            logger.info(f"Default transforms: {default_transforms}.")
+        corpora = yaml.safe_load(opt.data)
+
+        for cname, corpus in corpora.items():
+            # Check Transforms
+            _transforms = corpus.get('transforms', None)
+            if _transforms is None:
+                logger.info(f"Missing transforms field for {cname} data, "
+                            f"set to default: {default_transforms}.")
+                corpus['transforms'] = default_transforms
+            # Check path
+            path_src = corpus.get('path_src', None)
+            path_tgt = corpus.get('path_tgt', None)
+            if path_src is None or path_tgt is None:
+                raise ValueError(f'Corpus {cname} src/tgt paths are required')
+            else:
+                cls._validate_file(path_src, info=f'{cname}/path_src')
+                cls._validate_file(path_tgt, info=f'{cname}/path_tgt')
+            path_align = corpus.get('path_align', None)
+            if path_align is None:
+                if hasattr(opt, 'lambda_align') and opt.lambda_align > 0.0:
+                    raise ValueError(f'Corpus {cname} alignment file path is '
+                                     'required when lambda_align > 0.0')
+                corpus['path_align'] = None
+            else:
+                cls._validate_file(path_align, info=f'{cname}/path_align')
+            # Check prefix: used when the prefix transform is applied
+            src_prefix = corpus.get('src_prefix', None)
+            tgt_prefix = corpus.get('tgt_prefix', None)
+            if src_prefix is None or tgt_prefix is None:
+                if 'prefix' in corpus['transforms']:
+                    raise ValueError(f'Corpus {cname} prefixes are required.')
+            # Check weight
+            weight = corpus.get('weight', None)
+            if weight is None:
+                if cname != CorpusName.VALID:
+                    logger.warning(f"Corpus {cname}'s weight should be given."
+                                   " We default it to 1 for you.")
+                corpus['weight'] = 1
+        logger.info(f"Parsed {len(corpora)} corpora from -data.")
+        opt.data = corpora
+
+    @classmethod
+    def _validate_transforms_opts(cls, opt):
+        """Check options used by transforms."""
+        for name, transform_cls in AVAILABLE_TRANSFORMS.items():
+            if name in opt._all_transform:
+                transform_cls._validate_options(opt)
+
+    @classmethod
+    def _get_all_transform(cls, opt):
+        """Should only be called after `_validate_data`."""
+        all_transforms = set(opt.transforms)
+        for cname, corpus in opt.data.items():
+            _transforms = set(corpus['transforms'])
+            if len(_transforms) != 0:
+                all_transforms.update(_transforms)
+        if hasattr(opt, 'lambda_align') and opt.lambda_align > 0.0:
+            if not all_transforms.isdisjoint(
+                    {'sentencepiece', 'bpe', 'onmt_tokenize'}):
+                raise ValueError('lambda_align is not compatible with'
+                                 ' on-the-fly tokenization.')
+            if not all_transforms.isdisjoint(
+                    {'tokendrop', 'prefix', 'bart'}):
+                raise ValueError('lambda_align is not compatible yet with'
+                                 ' potential token deletion/addition.')
+        opt._all_transform = all_transforms
+
+    @classmethod
+    def _validate_vocab_opts(cls, opt, build_vocab_only=False):
+        """Check options related to vocab."""
+        if opt.src_vocab:
+            cls._validate_file(opt.src_vocab, info='src vocab')
+        if opt.tgt_vocab:
+            cls._validate_file(opt.tgt_vocab, info='tgt vocab')
+
+        if not build_vocab_only:
+            if opt.dump_fields or opt.dump_transforms:
+                assert opt.save_data, \
+                    "-save_data should be set if -dump_fields or " \
+                    "-dump_transforms is set."
+            # Check embeddings stuff
+            if opt.both_embeddings is not None:
+                assert (opt.src_embeddings is None
+                        and opt.tgt_embeddings is None), \
+                    "You don't need -src_embeddings or -tgt_embeddings " \
+                    "if -both_embeddings is set."
+
+            if any([opt.both_embeddings is not None,
+                    opt.src_embeddings is not None,
+                    opt.tgt_embeddings is not None]):
+                assert opt.embeddings_type is not None, \
+                    "You need to specify an -embeddings_type!"
+                assert opt.save_data, \
+                    "-save_data should be set if using pretrained embeddings."
+
+    @classmethod
+    def validate_prepare_opts(cls, opt, build_vocab_only=False):
+        """Validate all options related to prepare (data/transform/vocab)."""
+        if opt.n_sample != 0:
+            assert opt.save_data, \
+                "-save_data should be set if you want to save samples."
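+        # Order matters below: corpora are validated first, the implied
+        # transform set is collected next, and only then are transform- and
+        # vocab-specific options checked.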
+        cls._validate_data(opt)
+        cls._get_all_transform(opt)
+        cls._validate_transforms_opts(opt)
+        cls._validate_vocab_opts(opt, build_vocab_only=build_vocab_only)
+
 
-class ArgumentParser(cfargparse.ArgumentParser):
+class ArgumentParser(cfargparse.ArgumentParser, DataOptsCheckerMixin):
+    """OpenNMT option parser extended with option-checking methods."""
+
     def __init__(
             self,
             config_file_parser_class=cfargparse.YAMLConfigFileParser,
@@ -53,13 +178,12 @@ def update_model_opts(cls, model_opt):
 
     @classmethod
     def validate_model_opts(cls, model_opt):
-        assert model_opt.model_type in ["text", "img", "audio", "vec"], \
+        assert model_opt.model_type in ["text"], \
             "Unsupported model type %s" % model_opt.model_type
 
-        # this check is here because audio allows the encoder and decoder to
-        # be different sizes, but other model types do not yet
+        # encoder and decoder should be the same size
         same_size = model_opt.enc_rnn_size == model_opt.dec_rnn_size
-        assert model_opt.model_type == 'audio' or same_size, \
+        assert same_size, \
             "The encoder and decoder rnns must be the same size for now"
 
         assert model_opt.rnn_type != "SRU" or model_opt.gpu_ranks, \
@@ -111,8 +235,6 @@ def validate_train_opts(cls, opt):
             raise AssertionError(
                 "-gpu_ranks should have master(=0) rank "
                 "unless -world_size is greater than len(gpu_ranks).")
-        assert len(opt.data_ids) == len(opt.data_weights), \
-            "Please check -data_ids and -data_weights options!"
 
         assert len(opt.dropout) == len(opt.dropout_steps), \
             "Number of dropout values must match accum_steps values"
@@ -120,47 +242,10 @@ def validate_train_opts(cls, opt):
         assert len(opt.attention_dropout) == len(opt.dropout_steps), \
             "Number of attention_dropout values must match accum_steps values"
 
+        assert len(opt.accum_count) == len(opt.accum_steps), \
+            'Number of accum_count values must match number of accum_steps'
+
     @classmethod
     def validate_translate_opts(cls, opt):
         if opt.beam_size != 1 and opt.random_sampling_topk != 1:
             raise ValueError('Can either do beam search OR random sampling.')
-
-    @classmethod
-    def validate_preprocess_args(cls, opt):
-        assert opt.max_shard_size == 0, \
-            "-max_shard_size is deprecated. Please use \
-            -shard_size (number of examples) instead."
-        assert opt.shuffle == 0, \
-            "-shuffle is not implemented. Please shuffle \
-            your data before pre-processing."
-
-        assert len(opt.train_src) == len(opt.train_tgt), \
-            "Please provide same number of src and tgt train files!"
-
-        assert len(opt.train_src) == len(opt.train_ids), \
-            "Please provide proper -train_ids for your data!"
-
-        for file in opt.train_src + opt.train_tgt:
-            assert os.path.isfile(file), "Please check path of %s" % file
-
-        if len(opt.train_align) == 1 and opt.train_align[0] is None:
-            opt.train_align = [None] * len(opt.train_src)
-        else:
-            assert len(opt.train_align) == len(opt.train_src), \
-                "Please provide same number of word alignment train \
-                files as src/tgt!"
-            for file in opt.train_align:
-                assert os.path.isfile(file), "Please check path of %s" % file
-
-        assert not opt.valid_align or os.path.isfile(opt.valid_align), \
-            "Please check path of your valid alignment file!"
-
-        assert not opt.valid_src or os.path.isfile(opt.valid_src), \
-            "Please check path of your valid src file!"
-        assert not opt.valid_tgt or os.path.isfile(opt.valid_tgt), \
-            "Please check path of your valid tgt file!"
-
-        assert not opt.src_vocab or os.path.isfile(opt.src_vocab), \
-            "Please check path of your src vocab!"
-        assert not opt.tgt_vocab or os.path.isfile(opt.tgt_vocab), \
-            "Please check path of your tgt vocab!"
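For reference, the corpus checks in `_validate_data` above amount to a schema like the following minimal `-data` YAML. The corpus names, paths, and the `filtertoolong` transform are illustrative only; `transforms` falls back to the global default when omitted, and a missing `weight` on a training corpus is defaulted to 1 with a warning:

    data:
        corpus_1:
            path_src: data/src-train.txt
            path_tgt: data/tgt-train.txt
            transforms: [filtertoolong]
            weight: 1
        valid:
            path_src: data/src-val.txt
            path_tgt: data/tgt-val.txt
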
diff --git a/onmt/utils/report_manager.py b/onmt/utils/report_manager.py
index 3bd3fa7e19..00b1fd2e26 100644
--- a/onmt/utils/report_manager.py
+++ b/onmt/utils/report_manager.py
@@ -9,7 +9,7 @@
 
 
 def build_report_manager(opt, gpu_rank):
-    if opt.tensorboard and gpu_rank == 0:
+    if opt.tensorboard and gpu_rank <= 0:
         from torch.utils.tensorboard import SummaryWriter
         tensorboard_log_dir = opt.tensorboard_log_dir
 
diff --git a/preprocess.py b/preprocess.py
deleted file mode 100644
index c0c7742fa0..0000000000
--- a/preprocess.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-from onmt.bin.preprocess import main
-
-
-if __name__ == "__main__":
-    main()
diff --git a/requirements.opt.txt b/requirements.opt.txt
index 7868307672..2878746e58 100644
--- a/requirements.opt.txt
+++ b/requirements.opt.txt
@@ -1,12 +1,8 @@
 cffi
-torchvision
 joblib
-librosa
 numba==0.43.0
 llvmlite==0.32.1
-Pillow
-git+git://github.com/pytorch/audio.git@d92de5b97fc6204db4b1e3ed20c03ac06f5d53f0
 pyrouge
-opencv-python
 git+git://github.com/NVIDIA/apex.git@700d6825e205732c1d6be511306ca4e595297070
-pretrainedmodels
+sentencepiece>=0.1.90
+subword-nmt>=0.3.7
diff --git a/setup.py b/setup.py
index 63ba60fe9e..71a180ed49 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@
         "tensorboard>=1.14",
         "flask",
         "waitress",
-        "pyonmttok==1.*;platform_system=='Linux'",
+        "pyonmttok>=1.19;platform_system=='Linux'",
         "pyyaml",
     ],
     entry_points={
@@ -37,9 +37,9 @@
             "onmt_server=onmt.bin.server:main",
             "onmt_train=onmt.bin.train:main",
             "onmt_translate=onmt.bin.translate:main",
-            "onmt_preprocess=onmt.bin.preprocess:main",
             "onmt_release_model=onmt.bin.release_model:main",
-            "onmt_average_models=onmt.bin.average_models:main"
+            "onmt_average_models=onmt.bin.average_models:main",
+            "onmt_build_vocab=onmt.bin.build_vocab:main"
         ],
     }
 )
diff --git a/tools/embeddings_to_torch.py b/tools/embeddings_to_torch.py
index 1bb57295c1..8de3a90805 100755
--- a/tools/embeddings_to_torch.py
+++ b/tools/embeddings_to_torch.py
@@ -5,7 +5,6 @@
 import argparse
 import torch
 from onmt.utils.logging import init_logger, logger
-from onmt.inputters.inputter import _old_style_vocab
 
 
 def get_vocabs(dict_path):
@@ -13,13 +12,10 @@ def get_vocabs(dict_path):
 
     vocs = []
     for side in ['src', 'tgt']:
-        if _old_style_vocab(fields):
-            vocab = next((v for n, v in fields if n == side), None)
-        else:
-            try:
-                vocab = fields[side].base_field.vocab
-            except AttributeError:
-                vocab = fields[side].vocab
+        try:
+            vocab = fields[side].base_field.vocab
+        except AttributeError:
+            vocab = fields[side].vocab
         vocs.append(vocab)
     enc_vocab, dec_vocab = vocs
 
diff --git a/tools/extract_embeddings.py b/tools/extract_embeddings.py
index bc518232a8..0e414465d8 100644
--- a/tools/extract_embeddings.py
+++ b/tools/extract_embeddings.py
@@ -4,7 +4,6 @@
 
 import onmt
 import onmt.model_builder
-import onmt.inputters as inputters
 import onmt.opts
 
 from onmt.utils.misc import use_gpu
@@ -43,11 +42,7 @@ def main():
                             map_location=lambda storage, loc: storage)
     model_opt = checkpoint['opt']
 
-    vocab = checkpoint['vocab']
-    if inputters.old_style_vocab(vocab):
-        fields = onmt.inputters.load_old_vocab(vocab)
-    else:
-        fields = vocab
+    fields = checkpoint['vocab']
 
     src_dict = fields['src'].base_field.vocab  # assumes src is text
     tgt_dict = fields['tgt'].base_field.vocab
diff --git a/tools/create_vocabulary.py b/tools/extract_vocabulary.py
similarity index 86%
rename from tools/create_vocabulary.py
rename to tools/extract_vocabulary.py
index ac7f0b1df3..e12df21f60 100644
--- a/tools/create_vocabulary.py
+++ b/tools/extract_vocabulary.py
@@ -2,7 +2,6 @@
 # -*- coding: utf-8 -*-
 import argparse
 import sys
-import os
 
 
 def read_files_batch(file_list):
@@ -39,7 +38,7 @@ def main():
     parser = argparse.ArgumentParser()
     parser.add_argument('-file_type', default='text',
                         choices=['text', 'field'], required=True,
-                        help="""Options for vocabulary creation.
+                        help="""Options for vocabulary extraction.
                         The default is 'text' where the user passes
                         a corpus or a list of corpora files for which
                         they want to create a vocabulary from.
@@ -77,11 +76,6 @@ def main():
             raise ValueError("If using -file_type='field', specifies "
                              "'src' or 'tgt' argument for -side.")
         import torch
-        try:
-            from onmt.inputters.inputter import _old_style_vocab
-        except ImportError:
-            sys.path.insert(1, os.path.join(sys.path[0], '..'))
-            from onmt.inputters.inputter import _old_style_vocab
 
         print("Reading input file...")
         if not len(opt.file) == 1:
@@ -89,13 +83,11 @@ def main():
                 "argument for -file.")
         vocabs = torch.load(opt.file[0])
         voc = dict(vocabs)[opt.side]
-        if _old_style_vocab(voc):
-            word_list = voc.itos
-        else:
-            try:
-                word_list = voc[0][1].base_field.vocab.itos
-            except AttributeError:
-                word_list = voc[0][1].vocab.itos
+
+        try:
+            word_list = voc[0][1].base_field.vocab.itos
+        except AttributeError:
+            word_list = voc[0][1].vocab.itos
 
     print("Writing vocabulary file...")
     with open(opt.out_file, "wb") as f:
diff --git a/tools/spm_to_vocab.py b/tools/spm_to_vocab.py
new file mode 100644
index 0000000000..ba7d734daa
--- /dev/null
+++ b/tools/spm_to_vocab.py
@@ -0,0 +1,23 @@
+# converts a SentencePiece vocabulary to the format expected by dynamic data
+# (essentially converts float expected counts to "fixed precision" int pseudo
+# counts)
+import sys
+import math
+from onmt.constants import DefaultTokens
+
+OMIT = (DefaultTokens.UNK, DefaultTokens.BOS, DefaultTokens.EOS)
+
+
+def convert(lines):
+    for line in lines:
+        w, c = line.rstrip('\n').split(None, 1)
+        if w in OMIT:
+            continue
+        c = math.exp(float(c)) * 1000000
+        c = int(c) + 1
+        yield w, c
+
+
+if __name__ == '__main__':
+    for w, c in convert(sys.stdin):
+        print('{}\t{}'.format(w, c))
diff --git a/tools/vid_feature_extractor.py b/tools/vid_feature_extractor.py
deleted file mode 100644
index 036b9e6d30..0000000000
--- a/tools/vid_feature_extractor.py
+++ /dev/null
@@ -1,300 +0,0 @@
-import argparse
-import os
-
-import tqdm
-from multiprocessing import Manager
-import numpy as np
-import cv2
-import torch
-import torch.nn as nn
-from PIL import Image
-import pretrainedmodels
-from pretrainedmodels.utils import TransformImage
-
-
-Q_FIN = "finished"  # end-of-queue flag
-
-
-def read_to_imgs(file):
-    """Yield images and their frame number from a video file."""
-    vidcap = cv2.VideoCapture(file)
-    success, image = vidcap.read()
-    idx = 0
-    while success:
-        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
-        yield image, idx
-        idx += 1
-        success, image = vidcap.read()
-
-
-def vid_len(path):
-    """Return the length of a video."""
-    return int(cv2.VideoCapture(path).get(cv2.CAP_PROP_FRAME_COUNT))
-
-
-class VidDset(object):
-    """For each video, yield its frames."""
-    def __init__(self, model, root_dir, filenames):
-        self.root_dir = root_dir
-        self.filenames = filenames
-        self.paths = [os.path.join(self.root_dir, f) for f in self.filenames]
-        self.xform = TransformImage(model)
-
-        self.current = 0
-
-    def __len__(self):
-        return len(self.filenames)
-
-    def __getitem__(self, i):
-        path = self.paths[i]
-        return ((path, idx, self.xform(Image.fromarray(img)))
-                for img, idx in read_to_imgs(path))
- - def __iter__(self): - return self - - def next(self): - if self.current >= len(self): - raise StopIteration - else: - self.current += 1 - return self[self.current - 1] - - def __next__(self): - return self.next() - - -def collate_tensor(batch): - batch[-1] = torch.stack(batch[-1], 0) - - -def batch(dset, batch_size): - """Collate frames into batches of equal length.""" - batch = [[], [], []] - batch_ct = 0 - for seq in dset: - for path, idx, img in seq: - if batch_ct == batch_size: - collate_tensor(batch) - yield batch - batch = [[], [], []] - batch_ct = 0 - batch[0].append(path) - batch[1].append(idx) - batch[2].append(img) - batch_ct += 1 - if batch_ct != 0: - collate_tensor(batch) - yield batch - - -class FeatureExtractor(nn.Module): - """Extract feature vectors from a batch of frames.""" - def __init__(self): - super(FeatureExtractor, self).__init__() - self.model = pretrainedmodels.resnet152() - self.FEAT_SIZE = 2048 - - def forward(self, x): - return self.model.avgpool( - self.model.features(x)).view(-1, 1, self.FEAT_SIZE) - - -class Reconstructor(object): - """Turn batches of feature vectors into sequences for each video. - Assumes data is ordered (use one reconstructor per process). - :func:`push()` batches in. When finished, :func:`flush()` - the last sequence. - """ - - def __init__(self, out_path, finished_queue): - self.out_path = out_path - self.feats = None - self.finished_queue = finished_queue - - def save(self, path, feats): - np.save(path, feats.numpy()) - - @staticmethod - def name_(path, out_path): - vid_path = path - vid_fname = os.path.basename(vid_path) - vid_id = os.path.splitext(vid_fname)[0] - - save_fname = vid_id + ".npy" - save_path = os.path.join(out_path, save_fname) - return save_path, vid_id - - def name(self, path): - return self.name_(path, self.out_path) - - def push(self, paths, idxs, feats): - start = 0 - for i, idx in enumerate(idxs): - if idx == 0: - if self.feats is None and i == 0: - # degenerate case - continue - these_finished_seq_feats = feats[start:i] - if self.feats is not None: - all_last_seq_feats = torch.cat( - [self.feats, these_finished_seq_feats], 0) - else: - all_last_seq_feats = these_finished_seq_feats - if i - 1 < 0: - name = self.path - else: - name = paths[i-1] - save_path, vid_id = self.name(name) - self.save(save_path, all_last_seq_feats) - n_feats = all_last_seq_feats.shape[0] - self.finished_queue.put((vid_id, n_feats)) - self.feats = None - start = i - # cache the features - if self.feats is None: - self.feats = feats[start:] - else: - self.feats = torch.cat([self.feats, feats[start:]], 0) - self.path = paths[-1] - - def flush(self): - if self.feats is not None: # shouldn't be - save_path, vid_id = self.name(self.path) - self.save(save_path, self.feats) - self.finished_queue.put((vid_id, self.feats.shape[0])) - - -def finished_watcher(finished_queue, world_size, root_dir, files): - """Keep a progress bar of frames finished.""" - n_frames = sum(vid_len(os.path.join(root_dir, f)) for f in files) - n_finished_frames = 0 - with tqdm.tqdm(total=n_frames, unit="Fr") as pbar: - n_proc_finished = 0 - while True: - item = finished_queue.get() - if item == Q_FIN: - n_proc_finished += 1 - if n_proc_finished == world_size: - return - else: - vid_id, n_these_frames = item - n_finished_frames += n_these_frames - pbar.set_postfix(vid=vid_id) - pbar.update(n_these_frames) - - -def run(device_id, world_size, root_dir, batch_size_per_device, - feats_queue, files): - """Process a disjoint subset of the videos on each device.""" - if 
world_size > 1: - these_files = [f for i, f in enumerate(files) - if i % world_size == device_id] - else: - these_files = files - - fe = FeatureExtractor() - dset = VidDset(fe.model, root_dir, these_files) - dev = torch.device("cuda", device_id) \ - if device_id >= 0 else torch.device("cpu") - fe.to(dev) - fe = fe.eval() - with torch.no_grad(): - for samp in batch(dset, batch_size_per_device): - paths, idxs, images = samp - images = images.to(dev) - feats = fe(images) - if torch.is_tensor(feats): - feats = feats.to("cpu") - else: - feats = [f.to("cpu") for f in feats] - feats_queue.put((paths, idxs, feats)) - feats_queue.put(Q_FIN) - return - - -def saver(out_path, feats_queue, finished_queue): - rc = Reconstructor(out_path, finished_queue) - while True: - item = feats_queue.get() - if item == Q_FIN: - rc.flush() - finished_queue.put(Q_FIN) - return - else: - paths, idxs, feats = item - rc.push(paths, idxs, feats) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--root_dir", type=str, required=True, - help="Directory of videos.") - parser.add_argument("--out_dir", type=str, required=True, - help="Directory for output features.") - parser.add_argument("--world_size", type=int, default=1, - help="Number of devices to run on.") - parser.add_argument("--batch_size_per_device", type=int, default=512) - opt = parser.parse_args() - - batch_size_per_device = opt.batch_size_per_device - root_dir = opt.root_dir - out_path = opt.out_dir - if not os.path.exists(out_path): - os.makedirs(out_path) - - # mp queues don't work well between procs unless they're from a manager - manager = Manager() - finished_queue = manager.Queue() - - world_size = opt.world_size if torch.cuda.is_available() else -1 - - mp = torch.multiprocessing.get_context("spawn") - procs = [] - - print("Starting processing. Progress bar startup can take some time, but " - "processing will start in the meantime.") - - files = list(sorted(list(os.listdir(root_dir)))) - files = [f for f in files - if os.path.basename(Reconstructor.name_(f, out_path)[0]) - not in os.listdir(out_path)] - - procs.append(mp.Process( - target=finished_watcher, - args=(finished_queue, world_size, root_dir, files), - daemon=False - )) - procs[0].start() - - if world_size >= 1: - feat_queues = [manager.Queue(2) for _ in range(world_size)] - for feats_queue, device_id in zip(feat_queues, range(world_size)): - # each device has its own saver so that reconstructing is easier - procs.append(mp.Process( - target=run, - args=(device_id, world_size, root_dir, - batch_size_per_device, feats_queue, files), - daemon=True)) - procs[-1].start() - procs.append(mp.Process( - target=saver, - args=(out_path, feats_queue, finished_queue), - daemon=True)) - procs[-1].start() - else: - feats_queue = manager.Queue() - procs.append(mp.Process( - target=run, - args=(-1, 1, root_dir, - batch_size_per_device, feats_queue, files), - daemon=True)) - procs[-1].start() - procs.append(mp.Process( - target=saver, - args=(out_path, feats_queue, finished_queue), - daemon=True)) - procs[-1].start() - - for p in procs: - p.join()
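
As a sanity check on the conversion in tools/spm_to_vocab.py above: SentencePiece .vocab lines carry log probabilities, so a token scored -3.0 maps to int(exp(-3.0) * 1e6) + 1 = 49788, the integer pseudo-count written next to the token. A minimal sketch with fabricated vocab lines, assuming the repo root is on sys.path so `tools.spm_to_vocab` is importable:

    # Feed two fake SentencePiece vocab lines through convert().
    from tools.spm_to_vocab import convert

    lines = ['▁the\t-3.0', '<unk>\t0']   # '<unk>' is in OMIT and is skipped
    print(list(convert(lines)))          # -> [('▁the', 49788)]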