diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..6c1eddb6
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.ipynb_checkpoints
+*.pyc
diff --git a/DeepSpeech.ipynb b/DeepSpeech.ipynb
index a6fde0c6..d8ae1d48 100644
--- a/DeepSpeech.ipynb
+++ b/DeepSpeech.ipynb
@@ -2,7 +2,9 @@
"cells": [
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": true
+ },
"source": [
"# Introduction"
]
@@ -45,32 +47,6 @@
"\n"
]
},
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Data Import"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The import routines for the [TED-LIUM](http://www-lium.univ-lemans.fr/en/content/ted-lium-corpus) have yet to be written."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {
- "collapsed": true
- },
- "outputs": [],
- "source": [
- "#from ted_lium import input_data\n",
- "#ted_lium = input_data.read_data_sets(\"./TEDLIUM_release2\")"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -94,14 +70,15 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import numpy as np\n",
- "import tensorflow as tf"
+ "import tensorflow as tf\n",
+ "from tensorflow.python.ops import ctc_ops"
]
},
{
@@ -124,7 +101,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 2,
"metadata": {
"collapsed": true
},
@@ -134,9 +111,9 @@
"beta1 = 0.9 # TODO: Determine a reasonable value for this\n",
"beta2 = 0.999 # TODO: Determine a reasonable value for this\n",
"epsilon = 1e-8 # TODO: Determine a reasonable value for this\n",
- "training_iters = 100000 # TODO: Determine a reasonable value for this\n",
- "batch_size = 128 # TODO: Determine a reasonable value for this\n",
- "display_step = 10 # TODO: Determine a reasonable value for this"
+ "training_iters = 5000 # TODO: Determine a reasonable value for this\n",
+ "batch_size = 1 # TODO: Determine a reasonable value for this\n",
+ "display_step = 1 # TODO: Determine a reasonable value for this"
]
},
{
@@ -150,7 +127,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 3,
"metadata": {
"collapsed": true
},
@@ -168,7 +145,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 4,
"metadata": {
"collapsed": true
},
@@ -190,18 +167,7 @@
"source": [
"Now we will introduce several constants related to the geometry of the network.\n",
"\n",
- "The network views each speech sample as a sequence of time-slices $x^{(i)}_t$ of length $T^{(i)}$. As the speech samples vary in length, we know that $T^{(i)}$ need not equal $T^{(j)}$ for $i \\ne j$. However, BRNN in TensorFlow are unable to deal with sequences with differing lengths. Thus, we must pad speech sample sequences with trailing zeros such that they are all of the same length. This common padded length is captured in the variable `n_steps` "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {
- "collapsed": true
- },
- "outputs": [],
- "source": [
- "n_steps = 500 # TODO: Determine this programatically from the longest speech sample"
+    "The network views each speech sample as a sequence of time-slices $x^{(i)}_t$ of length $T^{(i)}$. As the speech samples vary in length, we know that $T^{(i)}$ need not equal $T^{(j)}$ for $i \\ne j$. However, BRNNs in TensorFlow are unable to deal with sequences of differing lengths. Thus, we must pad speech sample sequences with trailing zeros such that they are all of the same length. This common padded length is captured in the variable `n_steps`, which will be set after the data set is loaded."
]
},
{
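As an aside, the zero-padding described above is the key step in batch construction. The following is a minimal sketch of what such padding might look like; the helper name `pad_sequences` and the assumption that each sample is a 2-D array of shape `[T_i, n_input + 2*n_input*n_context]` are illustrative only and not taken from the notebook's importer.

```python
import numpy as np

def pad_sequences(sequences):
    """Pad a list of 2-D feature arrays with trailing zeros to a common length.

    Returns the padded 3-D batch and the original lengths (the lengths are
    what will later be fed to the `seq_len` placeholder).
    """
    lengths = np.array([s.shape[0] for s in sequences], dtype=np.int32)
    n_steps = lengths.max()                      # common padded length
    num_features = sequences[0].shape[1]
    padded = np.zeros((len(sequences), n_steps, num_features), dtype=np.float32)
    for i, s in enumerate(sequences):
        padded[i, :s.shape[0], :] = s            # trailing positions stay zero
    return padded, lengths
```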
@@ -213,7 +179,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {
"collapsed": true
},
@@ -231,7 +197,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 6,
"metadata": {
"collapsed": true
},
@@ -249,7 +215,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 7,
"metadata": {
"collapsed": true
},
@@ -306,7 +272,7 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 8,
"metadata": {
"collapsed": true
},
@@ -324,7 +290,7 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 9,
"metadata": {
"collapsed": true
},
@@ -342,7 +308,7 @@
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 10,
"metadata": {
"collapsed": true
},
@@ -360,7 +326,7 @@
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 11,
"metadata": {
"collapsed": true
},
@@ -369,6 +335,50 @@
"n_hidden_6 = n_character"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Data Import"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Next we will import the [TED-LIUM](http://www-lium.univ-lemans.fr/en/content/ted-lium-corpus) data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "from util.importers.ted_lium import read_data_sets\n",
+ "ted_lium = read_data_sets('./data/smoke_test', n_input, n_context)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "Now that we have loaded the data, we can set the `n_steps` parameter"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "n_steps = ted_lium.train.max_batch_seq_len"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -403,7 +413,7 @@
"source": [
"The placeholder `y` represents the text transcript of each element in a batch. `y` is of type \"SparseTensor\" required by the CTC algorithm. The details of how the text transcripts are encoded in to a \"SparseTensor\" will be presented below.\n",
"\n",
- "The placeholder `x` is a place holder for the the speech features along with their prefix and postfix contexts for each element in a batch. As it represents MFCC features, its type is \"float\". The `None` dimension of its shape\n",
+ "The placeholder `x` is a place holder for the speech features along with their prefix and postfix contexts for each element in a batch. As it represents MFCC features, its type is \"float\". The `None` dimension of its shape\n",
"\n",
"```python\n",
"[None, n_steps, n_input + 2*n_input*n_context]\n",
@@ -416,12 +426,37 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "As we will be employing dropout on the feedforward layers of the network we will also introduce a placeholder `keep_prob` which is a placeholder for the dropout rate for the feedforward layers"
+ "The next placeholder is for the sequence lengths of the elements in each batch"
]
},
{
"cell_type": "code",
"execution_count": 15,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "seq_len = tf.placeholder(tf.int32, [None])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "The `None` dimension of the placeholder `seq_len`, as in the case of the placeholders `x` and `y`, stands in for the batch size. So, `seq_len` is a placeholder for a vector of 32-bit integers, each of which holds the length of the corresponding element in the batch."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "As we will be employing dropout on the feedforward layers of the network, we will also introduce a placeholder `keep_prob` for the dropout keep probability of the feedforward layers"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
"metadata": {
"collapsed": true
},
@@ -443,7 +478,7 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 17,
"metadata": {
"collapsed": false
},
@@ -476,7 +511,7 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 18,
"metadata": {
"collapsed": false
},
@@ -636,7 +671,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 19,
"metadata": {
"collapsed": false
},
@@ -658,962 +693,5202 @@
"collapsed": true
},
"source": [
- "In accord with [Deep Speech: Scaling up end-to-end speech recognition](http://arxiv.org/abs/1412.5567), the loss function used by our network should be the CTC loss function[[2]](http://www.cs.toronto.edu/~graves/preprint.pdf). Unfortunately, as of this writing, the CTC loss function[[2]](http://www.cs.toronto.edu/~graves/preprint.pdf) is not implemented within TensorFlow[[5]](https://github.com/tensorflow/tensorflow/issues/32). Thus we will have to implement it ourselves. The next few sections are dedicated to this implementation."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Introduction"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The CTC algorithm was specifically designed for temporal classification tasks; that is, for sequence labelling problems where the alignment between the inputs and the target labels is unknown. Unlike hybrid approaches combining HMM and DNN, CTC models all aspects of the sequence with a single neural network, and does not require the network to be combined with a HMM. It also does not require pre-segmented training data, or external post-processing to extract the label sequence from the network outputs.\n",
- "\n",
- "Generally, neural networks require separate training targets for every timeslice in the input sequence. This has two important consequences. First, it means that the training data must be pre-segmented to provide targets for every timeslice. Second, as the network only outputs local classifications, global aspects of the sequence, such as the likelihood of two labels appearing consecutively, must be modelled externally. Indeed, without some form of post-processing the final label sequence cannot reliably be inferred at all.\n",
- "\n",
- "CTC avoids this problem by allowing the network to make label predictions at any point in the input sequence, so long as the overall sequence of labels is correct. This removes the need for pre-segmented data, since the alignment of the labels with the input is no longer important. Moreover, CTC directly outputs the probabilities of the complete label sequences, which means that no external post-processing is required to use the network as a temporal classifier."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## From Outputs to Labellings"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "For a sequence labelling task where the labels are drawn from an alphabet $A$, CTC consists of a logits output layer, our `layer_6`, with one more unit than there are labels in `A`. The activations of the first `|A|` units correspond to the probabilities of outputting the corresponding labels at particular times, given the input sequence and the network weights. The activation of the extra unit corresponds to the probability of outputting a $blank$, or no label. The complete sequence of network outputs is then used to define a distribution over all possible label sequences of length up to that of the input sequence."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Defining the extended alphabet $A′ = A \\cup \\{blank\\}$, the activation $y_{t,p}$ of network output $p$ at time $t$ is interpreted as the probability that the network will output element $p$ of $A′$ at time $t$, given the length $T$ input sequence $x$. Let $A′^T$ denote the set of length $T$ sequences over $A′$. Then, if we assume the output probabilities at each timestep to be independent of those at other timesteps (or rather, conditionally independent given $x$), we get the following conditional distribution over $\\pi \\in A′^T$:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr( \\pi \\, | \\, x ) = \\prod_{t=1}^{T} y_{t,\\pi_t}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "From now on we refer to the sequences $\\pi$ over $A′$ as *paths*, to distinguish them from the *label sequences* or *labellings* $l$ over $A$. The next step is to define a many-to-one function $\\mathcal{B} : A′^T \\rightarrow A^{\\le T}$, from the set of paths onto the set $A^{\\le T}$ of possible labellings of $x$ (i.e. the set of sequences of length less than or equal to $T$ over $A$). We do this by removing first the repeated labels and then the blanks from the paths. For example,"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "\\begin{align}\n",
- "\\mathcal{B}(a − ab−) &= aab \\\\\n",
- "\\mathcal{B}(−aa − −abb) &= aab.\n",
- "\\end{align}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n",
- "Intuitively, this corresponds to outputting a new label when the network either switches from predicting no label to predicting a label, or from predicting one label to another. As $\\mathcal{B}$ is many-to-one, the probability of some labelling $l \\in A^{\\le T}$ can be calculated by summing the probabilities of all the paths mapped onto it by $\\mathcal{B}$:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr( l \\, | \\, x) = \\sum_{\\pi \\in \\mathcal{B}^{-1}(l)} \\Pr( \\pi \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "This 'collapsing together' of different paths onto the same labelling is what makes it possible for CTC to use unsegmented data, because it allows the network to predict the labels without knowing in advance where they occur. In theory, it also makes CTC networks unsuitable for tasks where the location of the labels must be determined. However in practice CTC tends to output labels close to where they occur in the input sequence."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Role of the Blank Labels"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "In the original formulation of CTC there were no blank labels, and $\\mathcal{B}(\\pi)$ was simply $\\pi$ with repeated labels removed. This led to two problems. First, the same label could not appear twice in a row, since transitions only occurred when $\\pi$ passed between different labels. Second, the network was required to continue predicting one label until the next began, which is a burden in tasks where the input segments corresponding to consecutive labels are widely separated by unlabelled data (for example, in speech recognition there are often pauses or non-speech noises between the words in an utterance)."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Forward-Backward Algorithm"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "So far we have defined the conditional probabilities $\\Pr(l \\, | \\, x)$ of the possible label sequences. Now we need an efficient way of calculating them. At first sight, the previous equation suggests this will be problematic. The sum is over all paths corresponding to a given labelling. The number of these paths grows exponentially with the length of the input sequence. More precisely, for a length $T$ input sequence and a length $U$ labelling there are"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$2^{T−U^2+U(T−3)}3^{(U−1)(T−U)−2}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "paths."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Fortunately the problem can be solved with a dynamic-programming algorithm similar to the forward-backward algorithm for HMM's[[6]](http://www.ee.columbia.edu/~dpwe/e6820/papers/Rabiner89-hmm.pdf). The key idea is that the sum over paths corresponding to a labelling l can be broken down into an iterative sum over paths corresponding to prefixes of that labelling."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "To allow for blanks in the output paths, we consider a modified \"label sequence\" $l′$, with blanks added to the beginning and the end of $l$, and inserted between every pair of consecutive labels. If the length of $l$ is $U$, the length of $l′$ is $U′ = 2U + 1$. In calculating the probabilities of prefixes of $l′$ we allow all transitions between blank and non-blank labels, and also those between any pair of distinct non-blank labels."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "For a labelling $l$, the forward variable $\\alpha(t,u)$ is defined as the summed probability of all length $t$ paths that are mapped by $\\mathcal{B}$ onto the length $\\left \\lfloor{u/2}\\right \\rfloor$ prefix of $l$. (Note, $\\left \\lfloor{u/2}\\right \\rfloor$ is the *floor* of $u/2$, the greatest integer less than or equal to $u/2$.) For some sequence $s$, let $s_{p:q}$ denote the subsequence $s_p$, $s_{p+1}$, ..., $s_{q−1}$, $s_q$, and define the set $V(t,u) \\equiv \\{ \\pi \\in A′^t : \\mathcal{B}(\\pi) = l_{1:\\left \\lfloor{u/2}\\right \\rfloor} \\text{ and } \\pi_t = l'_u \\}$. We can then define $\\alpha(t,u)$ as"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t,u) \\equiv \\sum_{\\pi \\in V(t,u)} \\prod_{i=1}^{t} y_{i,\\pi_i}$$ "
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "As we will see, the forward variables at time $t$ can be calculated recursively from those at time $t − 1$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Given the above formulation, the probability of $l$ can be expressed as the sum of the forward variables with and without the final blank at time $T$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr( l \\, | \\, x) = \\alpha(T, U') + \\alpha(T, U' - 1)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "All correct paths must start with either a blank $(b)$ or the first symbol in $l$ $(l_1)$,\n",
- "yielding the following initial conditions:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "\\begin{align}\n",
- "\\alpha(1, 1) &= y_{1,b} \\\\\n",
- "\\alpha(1, 2) &= y_{1,l_1} \\\\\n",
- "\\alpha(1, u) &= 0, \\, \\forall u > 2\n",
- "\\end{align}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Thereafter the variables can be calculated recursively:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t,u) = y_{t, l'_u} \\sum_{i = f(u)}^{u} \\alpha(t-1, i)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "f(u) =\n",
- "\\begin{cases}\n",
- "u - 1,& \\text{if } l'_u = blank \\text{ or } l'_{u−2} = l'_{u} \\\\\n",
- "u - 2,& \\text{otherwise}\n",
- "\\end{cases}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "which one can derive by expanding $\\alpha(t, u)$ and substituting $\\alpha(t -1, u)$ into the expansion."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Graphically we can express the recurrence relation for $\\alpha(t, u)$ as follows"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where $t$ runs along the $x$ axis and $u$ runs along the $y$ axis. The black circles of the diagram represent $blank$ elements of $l'$ while the white circles represent non-$blank$ elements of $l'$. The arrows represent computational dependencies derived from our recursion relation for $\\alpha(t,u)$. So, for example, the value of $\\alpha(2,3)$, corresponding to the $blank$ at $t=2$ and $u=3$, is derived from $\\alpha(1,2)$. Similarly, the value of $\\alpha(2,2)$, corresponding to the letter $c$ at $t=2$ and $u=2$, is derived from $\\alpha(1,2)$ and $\\alpha(1,1)$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Note also that"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t, u) = 0 \\,\\, \\forall u < U′ − 2(T − t) − 1$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "because these variables correspond to states for which there are not enough timesteps left to complete the sequence. We also impose the boundary condition"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t, 0) = 0 \\, \\, \\forall t$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The backward variables $\\beta(t,u)$ are defined as the summed probabilities of all paths starting at $t + 1$ that \"complete\" $l$ when appended to any path $\\hat{\\pi}$ contributing to $\\alpha(t,u)$. Define $W(t,u) \\equiv \\{ \\pi \\in A′^{T−t} : \\mathcal{B}(\\hat{\\pi} + \\pi) = l \\, \\, \\forall \\hat{\\pi} \\in V(t,u) \\}$. Then"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\beta(t,u) \\equiv \\sum_{\\pi \\in W(t,u)} \\prod_{i=1}^{T - t} y_{t + i,\\pi_i}$$ "
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The rules for initialisation of the backward variables are as follows"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "\\begin{align}\n",
- "\\beta(T, U') &= 1 \\\\\n",
- "\\beta(T, U' - 1) &= 1 \\\\\n",
- "\\beta(T, u) &= 0, \\, \\forall u < U' - 1\n",
- "\\end{align}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The rules for recursion are as follows"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\beta(t, u) = \\sum_{i = u}^{g(u)} \\beta(t+1, i) y_{t+1, l'_i}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "g(u) =\n",
- "\\begin{cases}\n",
- "u + 1,& \\text{if } l'_u = blank \\text{ or } l'_{u+2} = l'_{u} \\\\\n",
- "u + 2,& \\text{otherwise}\n",
- "\\end{cases}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Note that"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\beta(t, u) = 0 \\, \\, \\forall u > 2t$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "and"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\beta(t, U' + 1) = 0 \\, \\, \\forall t$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Log Scale"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "In practice, the above recursions will soon lead to underflows on any digital computer. A good way to avoid this is to work in the log scale, and only exponentiate to find the true probabilities at the end of the calculation. A useful equation in this context is"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\ln(a + b) = \\ln(a) + \\ln(1 + e^{\\ln b − \\ln a})$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "which allows the forward and backward variables to be summed while remaining in the log scale."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Loss Function"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The CTC loss function $\\mathcal{L}(S)$ is defined as the negative log probability of correctly labelling all the training examples in some training set S:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\mathcal{L}(S) = - \\ln \\prod_{(x,z) \\in S} \\Pr(z \\, | \\, x) = - \\sum_{(x,z) \\in S} \\ln \\Pr(z \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Because the function is differentiable, its derivatives with respect to the network weights can be calculated with backpropagation through time, and the network can then be trained with any gradient-based non-linear optimisation algorithm."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "We also define the *example loss*"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\mathcal{L}(x,z) \\equiv - \\ln \\Pr(z \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Obviously"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\mathcal{L}(S) = \\sum_{(x,z) \\in S} \\mathcal{L}(x,z)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Now if we identify $l$ and $z$ and define $X(t,u) \\equiv \\{ \\pi \\in A′^T : \\mathcal{B}(\\pi) = z, \\, \\pi_t = z′_u \\}$, then our definition of $\\alpha(t, u)$ and $\\beta(t, u)$ imply"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t, u) \\beta(t, u) = \\sum_{\\pi \\in X(t,u)} \\prod_{t = 1}^{T} y_{t, \\pi_t}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "thus substituting our previous expression for $\\Pr(\\pi \\, | \\, x)$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\alpha(t, u) \\beta(t, u) = \\sum_{\\pi \\in X(t,u)} \\Pr(\\pi \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "From our expression for $\\Pr(l \\, | \\, x)$ we can see that this is the portion of the total probability of $\\Pr(z \\, | \\, x)$ due to those paths going through $z′_u$ at time $t$. For any $t$, we can therefore sum over all $u$ to get"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr(z \\, | \\, x) = \\sum_{u = 1}^{|z'|} \\alpha(t, u) \\beta(t, u)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Thus the *example loss* is given by"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\mathcal{L}(x, z) = - \\ln \\sum_{u = 1}^{|z'|} \\alpha(t, u) \\beta(t, u)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "As"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\mathcal{L}(S) = \\sum_{(x,z) \\in S} \\mathcal{L}(x,z)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "the gradient of $\\mathcal{L}(S)$ can be computed by computing the gradient of $\\mathcal{L}(x, z)$. This gradient can be computed using the formulas above and TensorFlow's automatic differentiation."
+ "In accord with [Deep Speech: Scaling up end-to-end speech recognition](http://arxiv.org/abs/1412.5567), the loss function used by our network should be the CTC loss function[[2]](http://www.cs.toronto.edu/~graves/preprint.pdf). Conveniently, this loss function is implemented in TensorFlow. Thus, we can simply make use of this implementation to define our loss."
]
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 20,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "# CTC loss requires layer_6 be time major\n",
+ "layer_6 = tf.transpose(layer_6, [1, 0, 2])\n",
+ "\n",
+ "# Compute the CTC loss\n",
+ "total_loss = ctc_ops.ctc_loss(layer_6, y, seq_len)"
+ ]
+ },
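Note that `ctc_ops.ctc_loss` expects the target `y` to be a `SparseTensor`. The notebook's data importer performs this encoding itself; purely as an illustrative sketch (the `sparse_tuple_from` helper and the character-to-index mapping below are assumptions, not the notebook's actual code), a batch of transcripts could be turned into the `(indices, values, shape)` triple accepted by a `tf.sparse_placeholder` roughly as follows.

```python
import numpy as np

def text_to_ids(text):
    # Hypothetical mapping: 0 = space, 1-26 = 'a'-'z'; the notebook defines its own alphabet.
    return np.array([0 if c == ' ' else ord(c) - ord('a') + 1 for c in text.lower()],
                    dtype=np.int32)

def sparse_tuple_from(sequences):
    """Convert a list of label-id arrays into the (indices, values, shape)
    triple accepted by a tf.sparse_placeholder."""
    indices, values = [], []
    for batch_idx, seq in enumerate(sequences):
        indices.extend(zip([batch_idx] * len(seq), range(len(seq))))
        values.extend(seq)
    indices = np.asarray(indices, dtype=np.int64)
    values = np.asarray(values, dtype=np.int32)
    shape = np.asarray([len(sequences), indices[:, 1].max() + 1], dtype=np.int64)
    return indices, values, shape

# Example: encode two transcripts for one batch
batch_y_sparse = sparse_tuple_from([text_to_ids("hello"), text_to_ids("world")])
```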
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "Now, instead of using the total loss for the entire batch, we want to calculate the average loss across the batch to facilitate comparing results as the batch size varies. So we calculate the following"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
- "# cost = .... TODO: Compute the cost using the above formula"
+ "avg_loss = tf.reduce_mean(total_loss)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "## Decoding"
+ "# Optimizer"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "Once the network is trained, we would ideally label some unknown input sequence $x$ by choosing the most probable labelling $l^∗$:"
+    "In contrast to [Deep Speech: Scaling up end-to-end speech recognition](http://arxiv.org/abs/1412.5567), in which [Nesterov’s Accelerated Gradient Descent](http://www.cs.toronto.edu/~fritz/absps/momentum.pdf) was used, we will use the Adam method for optimization[[3]](http://arxiv.org/abs/1412.6980), because, generally, it requires less fine-tuning."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate,\n",
+ " beta1=beta1,\n",
+ " beta2=beta2,\n",
+ " epsilon=epsilon).minimize(avg_loss)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "$$l^* \\equiv \\underset{l}{\\operatorname{argmax}} \\Pr(l \\, | \\, x)$$"
+ "# Decoder"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "Using the terminology of HMM's, we refer to the task of finding this labelling as *decoding*. Unfortunately, we do not know of a general, tractable decoding algorithm for CTC. However we now present two approximate methods that work well in practice."
+    "Next, to monitor training progress, we will introduce an operator used to decode the output of the network"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "decoded, _ = ctc_ops.ctc_beam_search_decoder(layer_6, seq_len)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "### Best Path Decoding"
+    "Using this decoding operator we can then calculate the character error rate (CER) of the system as the mean edit distance between the decoded output and the target transcript, normalized by the transcript length"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "acc = tf.reduce_mean(tf.edit_distance(tf.cast(decoded[0], tf.int32), y))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "The first method, which refer to as *best path decoding*, is based on the assumption that the most probable path corresponds to the most probable labelling"
+ "# Training"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "$$l^* \\approx \\mathcal{B}(\\pi^*)$$"
+ "Now we will begin the process of training the network"
]
},
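The notebook's actual training cell and its logged output follow. Purely for orientation, here is a sketch of how the pieces defined above could be wired together in one training loop; the batch accessor `ted_lium.train.next_batch` and the `keep_prob` value used here are assumptions, not verified parts of the importer's API.

```python
# Illustrative sketch only -- not the notebook's actual training cell.
init = tf.initialize_all_variables()

with tf.Session() as session:
    session.run(init)
    for epoch in range(training_iters):
        # Assumed accessor: padded features, sparse targets and true sequence lengths.
        batch_x, batch_y_sparse, batch_seq_len = ted_lium.train.next_batch(batch_size)
        _, cer = session.run([optimizer, acc],
                             feed_dict={x: batch_x,
                                        y: batch_y_sparse,
                                        seq_len: batch_seq_len,
                                        keep_prob: 0.5})  # assumed dropout keep probability
        if epoch % display_step == 0:
            print("Epoch: %04d avg_cer= %.9f" % (epoch + 1, cer))
```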
{
- "cell_type": "markdown",
- "metadata": {},
+ "cell_type": "code",
+ "execution_count": 25,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch: 0001 avg_cer= 3.557692289\n",
+ "Epoch: 0002 avg_cer= 3.076923132\n",
+ "Epoch: 0003 avg_cer= 3.307692289\n",
+ "Epoch: 0004 avg_cer= 3.365384579\n",
+ "Epoch: 0005 avg_cer= 3.673076868\n",
+ "Epoch: 0006 avg_cer= 4.211538315\n",
+ "Epoch: 0007 avg_cer= 4.153846264\n",
+ "Epoch: 0008 avg_cer= 4.211538315\n",
+ "Epoch: 0009 avg_cer= 4.096153736\n",
+ "Epoch: 0010 avg_cer= 4.307692528\n",
+ "Epoch: 0011 avg_cer= 4.423077106\n",
+ "Epoch: 0012 avg_cer= 4.153846264\n",
+ "Epoch: 0013 avg_cer= 4.134615421\n",
+ "Epoch: 0014 avg_cer= 3.961538553\n",
+ "Epoch: 0015 avg_cer= 4.057692528\n",
+ "Epoch: 0016 avg_cer= 4.096153736\n",
+ "Epoch: 0017 avg_cer= 4.096153736\n",
+ "Epoch: 0018 avg_cer= 4.115384579\n",
+ "Epoch: 0019 avg_cer= 4.134615421\n",
+ "Epoch: 0020 avg_cer= 4.153846264\n",
+ "Epoch: 0021 avg_cer= 4.076922894\n",
+ "Epoch: 0022 avg_cer= 4.307692528\n",
+ "Epoch: 0023 avg_cer= 4.134615421\n",
+ "Epoch: 0024 avg_cer= 4.115384579\n",
+ "Epoch: 0025 avg_cer= 4.173077106\n",
+ "Epoch: 0026 avg_cer= 4.442307472\n",
+ "Epoch: 0027 avg_cer= 4.346153736\n",
+ "Epoch: 0028 avg_cer= 4.153846264\n",
+ "Epoch: 0029 avg_cer= 3.750000000\n",
+ "Epoch: 0030 avg_cer= 3.923076868\n",
+ "Epoch: 0031 avg_cer= 3.923076868\n",
+ "Epoch: 0032 avg_cer= 3.634615421\n",
+ "Epoch: 0033 avg_cer= 3.211538553\n",
+ "Epoch: 0034 avg_cer= 2.711538553\n",
+ "Epoch: 0035 avg_cer= 2.384615421\n",
+ "Epoch: 0036 avg_cer= 2.346153736\n",
+ "Epoch: 0037 avg_cer= 2.115384579\n",
+ "Epoch: 0038 avg_cer= 1.961538434\n",
+ "Epoch: 0039 avg_cer= 2.442307711\n",
+ "Epoch: 0040 avg_cer= 1.384615421\n",
+ "Epoch: 0041 avg_cer= 1.096153855\n",
+ "Epoch: 0042 avg_cer= 2.673076868\n",
+ "Epoch: 0043 avg_cer= 2.711538553\n",
+ "Epoch: 0044 avg_cer= 2.750000000\n",
+ "Epoch: 0045 avg_cer= 2.211538553\n",
+ "Epoch: 0046 avg_cer= 2.653846264\n",
+ "Epoch: 0047 avg_cer= 2.750000000\n",
+ "Epoch: 0048 avg_cer= 2.384615421\n",
+ "Epoch: 0049 avg_cer= 2.730769157\n",
+ "Epoch: 0050 avg_cer= 2.730769157\n",
+ "Epoch: 0051 avg_cer= 2.403846264\n",
+ "Epoch: 0052 avg_cer= 2.653846264\n",
+ "Epoch: 0053 avg_cer= 2.173076868\n",
+ "Epoch: 0054 avg_cer= 2.461538553\n",
+ "Epoch: 0055 avg_cer= 2.461538553\n",
+ "Epoch: 0056 avg_cer= 2.307692289\n",
+ "Epoch: 0057 avg_cer= 2.615384579\n",
+ "Epoch: 0058 avg_cer= 2.326923132\n",
+ "Epoch: 0059 avg_cer= 2.750000000\n",
+ "Epoch: 0060 avg_cer= 2.750000000\n",
+ "Epoch: 0061 avg_cer= 2.730769157\n",
+ "Epoch: 0062 avg_cer= 2.673076868\n",
+ "Epoch: 0063 avg_cer= 2.596153736\n",
+ "Epoch: 0064 avg_cer= 2.288461447\n",
+ "Epoch: 0065 avg_cer= 0.923076928\n",
+ "Epoch: 0066 avg_cer= 2.288461447\n",
+ "Epoch: 0067 avg_cer= 2.346153736\n",
+ "Epoch: 0068 avg_cer= 2.461538553\n",
+ "Epoch: 0069 avg_cer= 2.538461447\n",
+ "Epoch: 0070 avg_cer= 2.596153736\n",
+ "Epoch: 0071 avg_cer= 2.730769157\n",
+ "Epoch: 0072 avg_cer= 2.442307711\n",
+ "Epoch: 0073 avg_cer= 2.750000000\n",
+ "Epoch: 0074 avg_cer= 0.923076928\n",
+ "Epoch: 0075 avg_cer= 0.961538434\n",
+ "Epoch: 0076 avg_cer= 0.980769217\n",
+ "Epoch: 0077 avg_cer= 0.980769217\n",
+ "Epoch: 0078 avg_cer= 0.884615362\n",
+ "Epoch: 0079 avg_cer= 0.884615362\n",
+ "Epoch: 0080 avg_cer= 1.096153855\n",
+ "Epoch: 0081 avg_cer= 0.884615362\n",
+ "Epoch: 0082 avg_cer= 0.884615362\n",
+ "Epoch: 0083 avg_cer= 1.942307711\n",
+ "Epoch: 0084 avg_cer= 1.403846145\n",
+ "Epoch: 0085 avg_cer= 0.942307711\n",
+ "Epoch: 0086 avg_cer= 0.846153855\n",
+ "Epoch: 0087 avg_cer= 0.846153855\n",
+ "Epoch: 0088 avg_cer= 0.865384638\n",
+ "Epoch: 0089 avg_cer= 0.865384638\n",
+ "Epoch: 0090 avg_cer= 0.961538434\n",
+ "Epoch: 0091 avg_cer= 1.000000000\n",
+ "Epoch: 0092 avg_cer= 1.096153855\n",
+ "Epoch: 0093 avg_cer= 0.846153855\n",
+ "Epoch: 0094 avg_cer= 0.846153855\n",
+ "Epoch: 0095 avg_cer= 0.846153855\n",
+ "Epoch: 0096 avg_cer= 0.846153855\n",
+ "Epoch: 0097 avg_cer= 0.807692289\n",
+ "Epoch: 0098 avg_cer= 0.884615362\n",
+ "Epoch: 0099 avg_cer= 0.865384638\n",
+ "Epoch: 0100 avg_cer= 2.500000000\n",
+ "Epoch: 0101 avg_cer= 0.730769217\n",
+ "Epoch: 0102 avg_cer= 0.846153855\n",
+ "Epoch: 0103 avg_cer= 0.884615362\n",
+ "Epoch: 0104 avg_cer= 0.846153855\n",
+ "Epoch: 0105 avg_cer= 1.442307711\n",
+ "Epoch: 0106 avg_cer= 0.884615362\n",
+ "Epoch: 0107 avg_cer= 0.884615362\n",
+ "Epoch: 0108 avg_cer= 0.730769217\n",
+ "Epoch: 0109 avg_cer= 0.730769217\n",
+ "Epoch: 0110 avg_cer= 0.788461566\n",
+ "Epoch: 0111 avg_cer= 0.807692289\n",
+ "Epoch: 0112 avg_cer= 0.750000000\n",
+ "Epoch: 0113 avg_cer= 0.730769217\n",
+ "Epoch: 0114 avg_cer= 0.730769217\n",
+ "Epoch: 0115 avg_cer= 0.750000000\n",
+ "Epoch: 0116 avg_cer= 0.750000000\n",
+ "Epoch: 0117 avg_cer= 0.788461566\n",
+ "Epoch: 0118 avg_cer= 0.788461566\n",
+ "Epoch: 0119 avg_cer= 0.730769217\n",
+ "Epoch: 0120 avg_cer= 0.730769217\n",
+ "Epoch: 0121 avg_cer= 0.730769217\n",
+ "Epoch: 0122 avg_cer= 0.730769217\n",
+ "Epoch: 0123 avg_cer= 0.730769217\n",
+ "Epoch: 0124 avg_cer= 0.730769217\n",
+ "Epoch: 0125 avg_cer= 0.730769217\n",
+ "Epoch: 0126 avg_cer= 0.730769217\n",
+ "Epoch: 0127 avg_cer= 0.730769217\n",
+ "Epoch: 0128 avg_cer= 0.846153855\n",
+ "Epoch: 0129 avg_cer= 0.846153855\n",
+ "Epoch: 0130 avg_cer= 0.846153855\n",
+ "Epoch: 0131 avg_cer= 0.846153855\n",
+ "Epoch: 0132 avg_cer= 0.730769217\n",
+ "Epoch: 0133 avg_cer= 0.730769217\n",
+ "Epoch: 0134 avg_cer= 0.730769217\n",
+ "Epoch: 0135 avg_cer= 0.730769217\n",
+ "Epoch: 0136 avg_cer= 0.730769217\n",
+ "Epoch: 0137 avg_cer= 0.730769217\n",
+ "Epoch: 0138 avg_cer= 0.730769217\n",
+ "Epoch: 0139 avg_cer= 0.730769217\n",
+ "Epoch: 0140 avg_cer= 0.730769217\n",
+ "Epoch: 0141 avg_cer= 0.711538434\n",
+ "Epoch: 0142 avg_cer= 0.730769217\n",
+ "Epoch: 0143 avg_cer= 0.711538434\n",
+ "Epoch: 0144 avg_cer= 0.711538434\n",
+ "Epoch: 0145 avg_cer= 0.711538434\n",
+ "Epoch: 0146 avg_cer= 0.711538434\n",
+ "Epoch: 0147 avg_cer= 0.711538434\n",
+ "Epoch: 0148 avg_cer= 0.711538434\n",
+ "Epoch: 0149 avg_cer= 0.711538434\n",
+ "Epoch: 0150 avg_cer= 0.711538434\n",
+ "Epoch: 0151 avg_cer= 0.711538434\n",
+ "Epoch: 0152 avg_cer= 0.711538434\n",
+ "Epoch: 0153 avg_cer= 0.711538434\n",
+ "Epoch: 0154 avg_cer= 0.711538434\n",
+ "Epoch: 0155 avg_cer= 0.711538434\n",
+ "Epoch: 0156 avg_cer= 0.711538434\n",
+ "Epoch: 0157 avg_cer= 0.711538434\n",
+ "Epoch: 0158 avg_cer= 0.711538434\n",
+ "Epoch: 0159 avg_cer= 0.711538434\n",
+ "Epoch: 0160 avg_cer= 0.711538434\n",
+ "Epoch: 0161 avg_cer= 0.711538434\n",
+ "Epoch: 0162 avg_cer= 0.711538434\n",
+ "Epoch: 0163 avg_cer= 0.711538434\n",
+ "Epoch: 0164 avg_cer= 0.711538434\n",
+ "Epoch: 0165 avg_cer= 0.711538434\n",
+ "Epoch: 0166 avg_cer= 0.711538434\n",
+ "Epoch: 0167 avg_cer= 0.711538434\n",
+ "Epoch: 0168 avg_cer= 0.711538434\n",
+ "Epoch: 0169 avg_cer= 0.711538434\n",
+ "Epoch: 0170 avg_cer= 0.711538434\n",
+ "Epoch: 0171 avg_cer= 0.711538434\n",
+ "Epoch: 0172 avg_cer= 0.711538434\n",
+ "Epoch: 0173 avg_cer= 0.711538434\n",
+ "Epoch: 0174 avg_cer= 0.711538434\n",
+ "Epoch: 0175 avg_cer= 0.711538434\n",
+ "Epoch: 0176 avg_cer= 0.711538434\n",
+ "Epoch: 0177 avg_cer= 0.711538434\n",
+ "Epoch: 0178 avg_cer= 0.711538434\n",
+ "Epoch: 0179 avg_cer= 0.711538434\n",
+ "Epoch: 0180 avg_cer= 0.711538434\n",
+ "Epoch: 0181 avg_cer= 0.711538434\n",
+ "Epoch: 0182 avg_cer= 0.711538434\n",
+ "Epoch: 0183 avg_cer= 0.711538434\n",
+ "Epoch: 0184 avg_cer= 0.711538434\n",
+ "Epoch: 0185 avg_cer= 0.711538434\n",
+ "Epoch: 0186 avg_cer= 0.711538434\n",
+ "Epoch: 0187 avg_cer= 0.711538434\n",
+ "Epoch: 0188 avg_cer= 0.711538434\n",
+ "Epoch: 0189 avg_cer= 0.711538434\n",
+ "Epoch: 0190 avg_cer= 0.711538434\n",
+ "Epoch: 0191 avg_cer= 0.711538434\n",
+ "Epoch: 0192 avg_cer= 0.711538434\n",
+ "Epoch: 0193 avg_cer= 0.711538434\n",
+ "Epoch: 0194 avg_cer= 0.711538434\n",
+ "Epoch: 0195 avg_cer= 0.711538434\n",
+ "Epoch: 0196 avg_cer= 0.711538434\n",
+ "Epoch: 0197 avg_cer= 0.711538434\n",
+ "Epoch: 0198 avg_cer= 0.711538434\n",
+ "Epoch: 0199 avg_cer= 0.711538434\n",
+ "Epoch: 0200 avg_cer= 0.711538434\n",
+ "Epoch: 0201 avg_cer= 0.711538434\n",
+ "Epoch: 0202 avg_cer= 0.711538434\n",
+ "Epoch: 0203 avg_cer= 0.711538434\n",
+ "Epoch: 0204 avg_cer= 0.711538434\n",
+ "Epoch: 0205 avg_cer= 0.711538434\n",
+ "Epoch: 0206 avg_cer= 0.711538434\n",
+ "Epoch: 0207 avg_cer= 0.711538434\n",
+ "Epoch: 0208 avg_cer= 0.711538434\n",
+ "Epoch: 0209 avg_cer= 0.711538434\n",
+ "Epoch: 0210 avg_cer= 0.711538434\n",
+ "Epoch: 0211 avg_cer= 0.711538434\n",
+ "Epoch: 0212 avg_cer= 0.711538434\n",
+ "Epoch: 0213 avg_cer= 0.711538434\n",
+ "Epoch: 0214 avg_cer= 0.711538434\n",
+ "Epoch: 0215 avg_cer= 0.711538434\n",
+ "Epoch: 0216 avg_cer= 0.711538434\n",
+ "Epoch: 0217 avg_cer= 0.711538434\n",
+ "Epoch: 0218 avg_cer= 0.711538434\n",
+ "Epoch: 0219 avg_cer= 0.711538434\n",
+ "Epoch: 0220 avg_cer= 0.711538434\n",
+ "Epoch: 0221 avg_cer= 0.711538434\n",
+ "Epoch: 0222 avg_cer= 0.711538434\n",
+ "Epoch: 0223 avg_cer= 0.711538434\n",
+ "Epoch: 0224 avg_cer= 0.711538434\n",
+ "Epoch: 0225 avg_cer= 0.711538434\n",
+ "Epoch: 0226 avg_cer= 0.711538434\n",
+ "Epoch: 0227 avg_cer= 0.711538434\n",
+ "Epoch: 0228 avg_cer= 0.711538434\n",
+ "Epoch: 0229 avg_cer= 0.711538434\n",
+ "Epoch: 0230 avg_cer= 0.711538434\n",
+ "Epoch: 0231 avg_cer= 0.711538434\n",
+ "Epoch: 0232 avg_cer= 0.711538434\n",
+ "Epoch: 0233 avg_cer= 0.711538434\n",
+ "Epoch: 0234 avg_cer= 0.711538434\n",
+ "Epoch: 0235 avg_cer= 0.711538434\n",
+ "Epoch: 0236 avg_cer= 0.711538434\n",
+ "Epoch: 0237 avg_cer= 0.711538434\n",
+ "Epoch: 0238 avg_cer= 0.711538434\n",
+ "Epoch: 0239 avg_cer= 0.711538434\n",
+ "Epoch: 0240 avg_cer= 0.711538434\n",
+ "Epoch: 0241 avg_cer= 0.711538434\n",
+ "Epoch: 0242 avg_cer= 0.711538434\n",
+ "Epoch: 0243 avg_cer= 0.711538434\n",
+ "Epoch: 0244 avg_cer= 0.711538434\n",
+ "Epoch: 0245 avg_cer= 0.711538434\n",
+ "Epoch: 0246 avg_cer= 0.711538434\n",
+ "Epoch: 0247 avg_cer= 0.711538434\n",
+ "Epoch: 0248 avg_cer= 0.711538434\n",
+ "Epoch: 0249 avg_cer= 0.711538434\n",
+ "Epoch: 0250 avg_cer= 0.711538434\n",
+ "Epoch: 0251 avg_cer= 0.711538434\n",
+ "Epoch: 0252 avg_cer= 0.711538434\n",
+ "Epoch: 0253 avg_cer= 0.711538434\n",
+ "Epoch: 0254 avg_cer= 0.711538434\n",
+ "Epoch: 0255 avg_cer= 0.711538434\n",
+ "Epoch: 0256 avg_cer= 0.711538434\n",
+ "Epoch: 0257 avg_cer= 0.711538434\n",
+ "Epoch: 0258 avg_cer= 0.711538434\n",
+ "Epoch: 0259 avg_cer= 0.711538434\n",
+ "Epoch: 0260 avg_cer= 0.711538434\n",
+ "Epoch: 0261 avg_cer= 0.711538434\n",
+ "Epoch: 0262 avg_cer= 0.711538434\n",
+ "Epoch: 0263 avg_cer= 0.711538434\n",
+ "Epoch: 0264 avg_cer= 0.711538434\n",
+ "Epoch: 0265 avg_cer= 0.711538434\n",
+ "Epoch: 0266 avg_cer= 0.711538434\n",
+ "Epoch: 0267 avg_cer= 0.711538434\n",
+ "Epoch: 0268 avg_cer= 0.711538434\n",
+ "Epoch: 0269 avg_cer= 0.711538434\n",
+ "Epoch: 0270 avg_cer= 0.711538434\n",
+ "Epoch: 0271 avg_cer= 0.711538434\n",
+ "Epoch: 0272 avg_cer= 0.711538434\n",
+ "Epoch: 0273 avg_cer= 0.711538434\n",
+ "Epoch: 0274 avg_cer= 0.711538434\n",
+ "Epoch: 0275 avg_cer= 0.711538434\n",
+ "Epoch: 0276 avg_cer= 0.711538434\n",
+ "Epoch: 0277 avg_cer= 0.711538434\n",
+ "Epoch: 0278 avg_cer= 0.711538434\n",
+ "Epoch: 0279 avg_cer= 0.711538434\n",
+ "Epoch: 0280 avg_cer= 0.711538434\n",
+ "Epoch: 0281 avg_cer= 0.711538434\n",
+ "Epoch: 0282 avg_cer= 0.711538434\n",
+ "Epoch: 0283 avg_cer= 0.711538434\n",
+ "Epoch: 0284 avg_cer= 0.711538434\n",
+ "Epoch: 0285 avg_cer= 0.711538434\n",
+ "Epoch: 0286 avg_cer= 0.711538434\n",
+ "Epoch: 0287 avg_cer= 0.711538434\n",
+ "Epoch: 0288 avg_cer= 0.711538434\n",
+ "Epoch: 0289 avg_cer= 0.711538434\n",
+ "Epoch: 0290 avg_cer= 0.711538434\n",
+ "Epoch: 0291 avg_cer= 0.711538434\n",
+ "Epoch: 0292 avg_cer= 0.711538434\n",
+ "Epoch: 0293 avg_cer= 0.711538434\n",
+ "Epoch: 0294 avg_cer= 0.711538434\n",
+ "Epoch: 0295 avg_cer= 0.711538434\n",
+ "Epoch: 0296 avg_cer= 0.711538434\n",
+ "Epoch: 0297 avg_cer= 0.711538434\n",
+ "Epoch: 0298 avg_cer= 0.692307711\n",
+ "Epoch: 0299 avg_cer= 0.692307711\n",
+ "Epoch: 0300 avg_cer= 0.692307711\n",
+ "Epoch: 0301 avg_cer= 0.692307711\n",
+ "Epoch: 0302 avg_cer= 0.692307711\n",
+ "Epoch: 0303 avg_cer= 0.692307711\n",
+ "Epoch: 0304 avg_cer= 0.692307711\n",
+ "Epoch: 0305 avg_cer= 0.692307711\n",
+ "Epoch: 0306 avg_cer= 0.692307711\n",
+ "Epoch: 0307 avg_cer= 0.692307711\n",
+ "Epoch: 0308 avg_cer= 0.692307711\n",
+ "Epoch: 0309 avg_cer= 0.692307711\n",
+ "Epoch: 0310 avg_cer= 0.692307711\n",
+ "Epoch: 0311 avg_cer= 0.692307711\n",
+ "Epoch: 0312 avg_cer= 0.692307711\n",
+ "Epoch: 0313 avg_cer= 0.692307711\n",
+ "Epoch: 0314 avg_cer= 0.692307711\n",
+ "Epoch: 0315 avg_cer= 0.692307711\n",
+ "Epoch: 0316 avg_cer= 0.692307711\n",
+ "Epoch: 0317 avg_cer= 0.692307711\n",
+ "Epoch: 0318 avg_cer= 0.692307711\n",
+ "Epoch: 0319 avg_cer= 0.711538434\n",
+ "Epoch: 0320 avg_cer= 0.692307711\n",
+ "Epoch: 0321 avg_cer= 0.692307711\n",
+ "Epoch: 0322 avg_cer= 0.692307711\n",
+ "Epoch: 0323 avg_cer= 0.692307711\n",
+ "Epoch: 0324 avg_cer= 0.692307711\n",
+ "Epoch: 0325 avg_cer= 0.692307711\n",
+ "Epoch: 0326 avg_cer= 0.692307711\n",
+ "Epoch: 0327 avg_cer= 0.692307711\n",
+ "Epoch: 0328 avg_cer= 0.692307711\n",
+ "Epoch: 0329 avg_cer= 0.692307711\n",
+ "Epoch: 0330 avg_cer= 0.692307711\n",
+ "Epoch: 0331 avg_cer= 0.692307711\n",
+ "Epoch: 0332 avg_cer= 0.692307711\n",
+ "Epoch: 0333 avg_cer= 0.692307711\n",
+ "Epoch: 0334 avg_cer= 0.692307711\n",
+ "Epoch: 0335 avg_cer= 0.692307711\n",
+ "Epoch: 0336 avg_cer= 0.692307711\n",
+ "Epoch: 0337 avg_cer= 0.692307711\n",
+ "Epoch: 0338 avg_cer= 0.692307711\n",
+ "Epoch: 0339 avg_cer= 0.692307711\n",
+ "Epoch: 0340 avg_cer= 0.692307711\n",
+ "Epoch: 0341 avg_cer= 0.692307711\n",
+ "Epoch: 0342 avg_cer= 0.692307711\n",
+ "Epoch: 0343 avg_cer= 0.692307711\n",
+ "Epoch: 0344 avg_cer= 0.692307711\n",
+ "Epoch: 0345 avg_cer= 0.692307711\n",
+ "Epoch: 0346 avg_cer= 0.692307711\n",
+ "Epoch: 0347 avg_cer= 0.692307711\n",
+ "Epoch: 0348 avg_cer= 0.692307711\n",
+ "Epoch: 0349 avg_cer= 0.692307711\n",
+ "Epoch: 0350 avg_cer= 0.692307711\n",
+ "Epoch: 0351 avg_cer= 0.711538434\n",
+ "Epoch: 0352 avg_cer= 0.692307711\n",
+ "Epoch: 0353 avg_cer= 0.692307711\n",
+ "Epoch: 0354 avg_cer= 0.692307711\n",
+ "Epoch: 0355 avg_cer= 0.692307711\n",
+ "Epoch: 0356 avg_cer= 0.692307711\n",
+ "Epoch: 0357 avg_cer= 0.692307711\n",
+ "Epoch: 0358 avg_cer= 0.692307711\n",
+ "Epoch: 0359 avg_cer= 0.692307711\n",
+ "Epoch: 0360 avg_cer= 0.692307711\n",
+ "Epoch: 0361 avg_cer= 0.692307711\n",
+ "Epoch: 0362 avg_cer= 0.692307711\n",
+ "Epoch: 0363 avg_cer= 0.692307711\n",
+ "Epoch: 0364 avg_cer= 0.692307711\n",
+ "Epoch: 0365 avg_cer= 0.692307711\n",
+ "Epoch: 0366 avg_cer= 0.692307711\n",
+ "Epoch: 0367 avg_cer= 0.692307711\n",
+ "Epoch: 0368 avg_cer= 0.692307711\n",
+ "Epoch: 0369 avg_cer= 0.692307711\n",
+ "Epoch: 0370 avg_cer= 0.692307711\n",
+ "Epoch: 0371 avg_cer= 0.692307711\n",
+ "Epoch: 0372 avg_cer= 0.692307711\n",
+ "Epoch: 0373 avg_cer= 0.692307711\n",
+ "Epoch: 0374 avg_cer= 0.692307711\n",
+ "Epoch: 0375 avg_cer= 0.692307711\n",
+ "Epoch: 0376 avg_cer= 0.692307711\n",
+ "Epoch: 0377 avg_cer= 0.692307711\n",
+ "Epoch: 0378 avg_cer= 0.692307711\n",
+ "Epoch: 0379 avg_cer= 0.692307711\n",
+ "Epoch: 0380 avg_cer= 0.692307711\n",
+ "Epoch: 0381 avg_cer= 0.692307711\n",
+ "Epoch: 0382 avg_cer= 0.692307711\n",
+ "Epoch: 0383 avg_cer= 0.692307711\n",
+ "Epoch: 0384 avg_cer= 0.692307711\n",
+ "Epoch: 0385 avg_cer= 0.692307711\n",
+ "Epoch: 0386 avg_cer= 0.692307711\n",
+ "Epoch: 0387 avg_cer= 0.692307711\n",
+ "Epoch: 0388 avg_cer= 0.692307711\n",
+ "Epoch: 0389 avg_cer= 0.692307711\n",
+ "Epoch: 0390 avg_cer= 0.807692289\n",
+ "Epoch: 0391 avg_cer= 0.807692289\n",
+ "Epoch: 0392 avg_cer= 0.692307711\n",
+ "Epoch: 0393 avg_cer= 0.692307711\n",
+ "Epoch: 0394 avg_cer= 0.692307711\n",
+ "Epoch: 0395 avg_cer= 0.750000000\n",
+ "Epoch: 0396 avg_cer= 0.750000000\n",
+ "Epoch: 0397 avg_cer= 0.750000000\n",
+ "Epoch: 0398 avg_cer= 0.750000000\n",
+ "Epoch: 0399 avg_cer= 0.750000000\n",
+ "Epoch: 0400 avg_cer= 0.692307711\n",
+ "Epoch: 0401 avg_cer= 0.692307711\n",
+ "Epoch: 0402 avg_cer= 0.692307711\n",
+ "Epoch: 0403 avg_cer= 0.692307711\n",
+ "Epoch: 0404 avg_cer= 0.692307711\n",
+ "Epoch: 0405 avg_cer= 0.692307711\n",
+ "Epoch: 0406 avg_cer= 0.692307711\n",
+ "Epoch: 0407 avg_cer= 0.692307711\n",
+ "Epoch: 0408 avg_cer= 0.692307711\n",
+ "Epoch: 0409 avg_cer= 0.692307711\n",
+ "Epoch: 0410 avg_cer= 0.692307711\n",
+ "Epoch: 0411 avg_cer= 0.692307711\n",
+ "Epoch: 0412 avg_cer= 0.692307711\n",
+ "Epoch: 0413 avg_cer= 0.692307711\n",
+ "Epoch: 0414 avg_cer= 0.692307711\n",
+ "Epoch: 0415 avg_cer= 0.692307711\n",
+ "Epoch: 0416 avg_cer= 0.692307711\n",
+ "Epoch: 0417 avg_cer= 0.692307711\n",
+ "Epoch: 0418 avg_cer= 0.692307711\n",
+ "Epoch: 0419 avg_cer= 0.692307711\n",
+ "Epoch: 0420 avg_cer= 0.692307711\n",
+ "Epoch: 0421 avg_cer= 0.692307711\n",
+ "Epoch: 0422 avg_cer= 0.692307711\n",
+ "Epoch: 0423 avg_cer= 0.692307711\n",
+ "Epoch: 0424 avg_cer= 0.692307711\n",
+ "Epoch: 0425 avg_cer= 0.692307711\n",
+ "Epoch: 0426 avg_cer= 0.692307711\n",
+ "Epoch: 0427 avg_cer= 0.692307711\n",
+ "Epoch: 0428 avg_cer= 0.692307711\n",
+ "Epoch: 0429 avg_cer= 0.692307711\n",
+ "Epoch: 0430 avg_cer= 0.692307711\n",
+ "Epoch: 0431 avg_cer= 0.692307711\n",
+ "Epoch: 0432 avg_cer= 0.692307711\n",
+ "Epoch: 0433 avg_cer= 0.692307711\n",
+ "Epoch: 0434 avg_cer= 0.692307711\n",
+ "Epoch: 0435 avg_cer= 0.692307711\n",
+ "Epoch: 0436 avg_cer= 0.692307711\n",
+ "Epoch: 0437 avg_cer= 0.692307711\n",
+ "Epoch: 0438 avg_cer= 0.692307711\n",
+ "Epoch: 0439 avg_cer= 0.692307711\n",
+ "Epoch: 0440 avg_cer= 0.692307711\n",
+ "Epoch: 0441 avg_cer= 0.692307711\n",
+ "Epoch: 0442 avg_cer= 0.692307711\n",
+ "Epoch: 0443 avg_cer= 0.692307711\n",
+ "Epoch: 0444 avg_cer= 0.692307711\n",
+ "Epoch: 0445 avg_cer= 0.692307711\n",
+ "Epoch: 0446 avg_cer= 0.692307711\n",
+ "Epoch: 0447 avg_cer= 0.692307711\n",
+ "Epoch: 0448 avg_cer= 0.692307711\n",
+ "Epoch: 0449 avg_cer= 0.692307711\n",
+ "Epoch: 0450 avg_cer= 0.692307711\n",
+ "Epoch: 0451 avg_cer= 0.692307711\n",
+ "Epoch: 0452 avg_cer= 0.692307711\n",
+ "Epoch: 0453 avg_cer= 0.692307711\n",
+ "Epoch: 0454 avg_cer= 0.692307711\n",
+ "Epoch: 0455 avg_cer= 0.692307711\n",
+ "Epoch: 0456 avg_cer= 0.692307711\n",
+ "Epoch: 0457 avg_cer= 0.692307711\n",
+ "Epoch: 0458 avg_cer= 0.692307711\n",
+ "Epoch: 0459 avg_cer= 0.692307711\n",
+ "Epoch: 0460 avg_cer= 0.692307711\n",
+ "Epoch: 0461 avg_cer= 0.692307711\n",
+ "Epoch: 0462 avg_cer= 0.692307711\n",
+ "Epoch: 0463 avg_cer= 0.692307711\n",
+ "Epoch: 0464 avg_cer= 0.730769217\n",
+ "Epoch: 0465 avg_cer= 0.711538434\n",
+ "Epoch: 0466 avg_cer= 0.711538434\n",
+ "Epoch: 0467 avg_cer= 0.711538434\n",
+ "Epoch: 0468 avg_cer= 0.711538434\n",
+ "Epoch: 0469 avg_cer= 0.711538434\n",
+ "Epoch: 0470 avg_cer= 0.711538434\n",
+ "Epoch: 0471 avg_cer= 0.711538434\n",
+ "Epoch: 0472 avg_cer= 0.711538434\n",
+ "Epoch: 0473 avg_cer= 0.711538434\n",
+ "Epoch: 0474 avg_cer= 0.711538434\n",
+ "Epoch: 0475 avg_cer= 0.711538434\n",
+ "Epoch: 0476 avg_cer= 0.711538434\n",
+ "Epoch: 0477 avg_cer= 0.711538434\n",
+ "Epoch: 0478 avg_cer= 0.711538434\n",
+ "Epoch: 0479 avg_cer= 0.692307711\n",
+ "Epoch: 0480 avg_cer= 0.711538434\n",
+ "Epoch: 0481 avg_cer= 0.711538434\n",
+ "Epoch: 0482 avg_cer= 0.711538434\n",
+ "Epoch: 0483 avg_cer= 0.711538434\n",
+ "Epoch: 0484 avg_cer= 0.711538434\n",
+ "Epoch: 0485 avg_cer= 0.711538434\n",
+ "Epoch: 0486 avg_cer= 0.711538434\n",
+ "Epoch: 0487 avg_cer= 0.711538434\n",
+ "Epoch: 0488 avg_cer= 0.711538434\n",
+ "Epoch: 0489 avg_cer= 0.711538434\n",
+ "Epoch: 0490 avg_cer= 0.711538434\n",
+ "Epoch: 0491 avg_cer= 0.711538434\n",
+ "Epoch: 0492 avg_cer= 0.711538434\n",
+ "Epoch: 0493 avg_cer= 0.711538434\n",
+ "Epoch: 0494 avg_cer= 0.711538434\n",
+ "Epoch: 0495 avg_cer= 0.711538434\n",
+ "Epoch: 0496 avg_cer= 0.711538434\n",
+ "Epoch: 0497 avg_cer= 0.711538434\n",
+ "Epoch: 0498 avg_cer= 0.711538434\n",
+ "Epoch: 0499 avg_cer= 0.711538434\n",
+ "Epoch: 0500 avg_cer= 0.711538434\n",
+ "Epoch: 0501 avg_cer= 0.711538434\n",
+ "Epoch: 0502 avg_cer= 0.711538434\n",
+ "Epoch: 0503 avg_cer= 0.711538434\n",
+ "Epoch: 0504 avg_cer= 0.692307711\n",
+ "Epoch: 0505 avg_cer= 0.692307711\n",
+ "Epoch: 0506 avg_cer= 0.692307711\n",
+ "Epoch: 0507 avg_cer= 0.692307711\n",
+ "Epoch: 0508 avg_cer= 0.692307711\n",
+ "Epoch: 0509 avg_cer= 0.692307711\n",
+ "Epoch: 0510 avg_cer= 0.692307711\n",
+ "Epoch: 0511 avg_cer= 0.692307711\n",
+ "Epoch: 0512 avg_cer= 0.692307711\n",
+ "Epoch: 0513 avg_cer= 0.692307711\n",
+ "Epoch: 0514 avg_cer= 0.692307711\n",
+ "Epoch: 0515 avg_cer= 0.692307711\n",
+ "Epoch: 0516 avg_cer= 0.692307711\n",
+ "Epoch: 0517 avg_cer= 0.692307711\n",
+ "Epoch: 0518 avg_cer= 0.692307711\n",
+ "Epoch: 0519 avg_cer= 0.692307711\n",
+ "Epoch: 0520 avg_cer= 0.692307711\n",
+ "Epoch: 0521 avg_cer= 0.692307711\n",
+ "Epoch: 0522 avg_cer= 0.692307711\n",
+ "Epoch: 0523 avg_cer= 0.692307711\n",
+ "Epoch: 0524 avg_cer= 0.692307711\n",
+ "Epoch: 0525 avg_cer= 0.692307711\n",
+ "Epoch: 0526 avg_cer= 0.692307711\n",
+ "Epoch: 0527 avg_cer= 0.692307711\n",
+ "Epoch: 0528 avg_cer= 0.692307711\n",
+ "Epoch: 0529 avg_cer= 0.692307711\n",
+ "Epoch: 0530 avg_cer= 0.692307711\n",
+ "Epoch: 0531 avg_cer= 0.692307711\n",
+ "Epoch: 0532 avg_cer= 0.692307711\n",
+ "Epoch: 0533 avg_cer= 0.692307711\n",
+ "Epoch: 0534 avg_cer= 0.692307711\n",
+ "Epoch: 0535 avg_cer= 0.692307711\n",
+ "Epoch: 0536 avg_cer= 0.692307711\n",
+ "Epoch: 0537 avg_cer= 0.692307711\n",
+ "Epoch: 0538 avg_cer= 0.692307711\n",
+ "Epoch: 0539 avg_cer= 0.692307711\n",
+ "Epoch: 0540 avg_cer= 0.692307711\n",
+ "Epoch: 0541 avg_cer= 0.692307711\n",
+ "Epoch: 0542 avg_cer= 0.692307711\n",
+ "Epoch: 0543 avg_cer= 0.692307711\n",
+ "Epoch: 0544 avg_cer= 0.692307711\n",
+ "Epoch: 0545 avg_cer= 0.692307711\n",
+ "Epoch: 0546 avg_cer= 0.692307711\n",
+ "Epoch: 0547 avg_cer= 0.692307711\n",
+ "Epoch: 0548 avg_cer= 0.692307711\n",
+ "Epoch: 0549 avg_cer= 0.692307711\n",
+ "Epoch: 0550 avg_cer= 0.692307711\n",
+ "Epoch: 0551 avg_cer= 0.692307711\n",
+ "Epoch: 0552 avg_cer= 0.692307711\n",
+ "Epoch: 0553 avg_cer= 0.692307711\n",
+ "Epoch: 0554 avg_cer= 0.692307711\n",
+ "Epoch: 0555 avg_cer= 0.692307711\n",
+ "Epoch: 0556 avg_cer= 0.692307711\n",
+ "Epoch: 0557 avg_cer= 0.692307711\n",
+ "Epoch: 0558 avg_cer= 0.692307711\n",
+ "Epoch: 0559 avg_cer= 0.692307711\n",
+ "Epoch: 0560 avg_cer= 0.692307711\n",
+ "Epoch: 0561 avg_cer= 0.692307711\n",
+ "Epoch: 0562 avg_cer= 0.692307711\n",
+ "Epoch: 0563 avg_cer= 0.692307711\n",
+ "Epoch: 0564 avg_cer= 0.711538434\n",
+ "Epoch: 0565 avg_cer= 0.711538434\n",
+ "Epoch: 0566 avg_cer= 0.692307711\n",
+ "Epoch: 0567 avg_cer= 0.692307711\n",
+ "Epoch: 0568 avg_cer= 0.692307711\n",
+ "Epoch: 0569 avg_cer= 0.692307711\n",
+ "Epoch: 0570 avg_cer= 0.692307711\n",
+ "Epoch: 0571 avg_cer= 0.692307711\n",
+ "Epoch: 0572 avg_cer= 0.750000000\n",
+ "Epoch: 0573 avg_cer= 0.711538434\n",
+ "Epoch: 0574 avg_cer= 0.711538434\n",
+ "Epoch: 0575 avg_cer= 0.692307711\n",
+ "Epoch: 0576 avg_cer= 0.711538434\n",
+ "Epoch: 0577 avg_cer= 0.730769217\n",
+ "Epoch: 0578 avg_cer= 0.788461566\n",
+ "Epoch: 0579 avg_cer= 0.769230783\n",
+ "Epoch: 0580 avg_cer= 0.750000000\n",
+ "Epoch: 0581 avg_cer= 0.750000000\n",
+ "Epoch: 0582 avg_cer= 0.788461566\n",
+ "Epoch: 0583 avg_cer= 0.711538434\n",
+ "Epoch: 0584 avg_cer= 0.730769217\n",
+ "Epoch: 0585 avg_cer= 0.788461566\n",
+ "Epoch: 0586 avg_cer= 0.692307711\n",
+ "Epoch: 0587 avg_cer= 0.692307711\n",
+ "Epoch: 0588 avg_cer= 0.711538434\n",
+ "Epoch: 0589 avg_cer= 0.692307711\n",
+ "Epoch: 0590 avg_cer= 0.673076928\n",
+ "Epoch: 0591 avg_cer= 0.711538434\n",
+ "Epoch: 0592 avg_cer= 0.673076928\n",
+ "Epoch: 0593 avg_cer= 0.653846145\n",
+ "Epoch: 0594 avg_cer= 0.634615362\n",
+ "Epoch: 0595 avg_cer= 0.692307711\n",
+ "Epoch: 0596 avg_cer= 0.673076928\n",
+ "Epoch: 0597 avg_cer= 0.653846145\n",
+ "Epoch: 0598 avg_cer= 0.653846145\n",
+ "Epoch: 0599 avg_cer= 0.653846145\n",
+ "Epoch: 0600 avg_cer= 0.673076928\n",
+ "Epoch: 0601 avg_cer= 0.692307711\n",
+ "Epoch: 0602 avg_cer= 0.673076928\n",
+ "Epoch: 0603 avg_cer= 0.634615362\n",
+ "Epoch: 0604 avg_cer= 0.653846145\n",
+ "Epoch: 0605 avg_cer= 0.634615362\n",
+ "Epoch: 0606 avg_cer= 0.634615362\n",
+ "Epoch: 0607 avg_cer= 0.634615362\n",
+ "Epoch: 0608 avg_cer= 0.634615362\n",
+ "Epoch: 0609 avg_cer= 0.634615362\n",
+ "Epoch: 0610 avg_cer= 0.615384638\n",
+ "Epoch: 0611 avg_cer= 0.634615362\n",
+ "Epoch: 0612 avg_cer= 0.634615362\n",
+ "Epoch: 0613 avg_cer= 0.634615362\n",
+ "Epoch: 0614 avg_cer= 0.634615362\n",
+ "Epoch: 0615 avg_cer= 0.615384638\n",
+ "Epoch: 0616 avg_cer= 0.634615362\n",
+ "Epoch: 0617 avg_cer= 0.615384638\n",
+ "Epoch: 0618 avg_cer= 0.576923072\n",
+ "Epoch: 0619 avg_cer= 0.596153855\n",
+ "Epoch: 0620 avg_cer= 0.576923072\n",
+ "Epoch: 0621 avg_cer= 0.576923072\n",
+ "Epoch: 0622 avg_cer= 0.596153855\n",
+ "Epoch: 0623 avg_cer= 0.596153855\n",
+ "Epoch: 0624 avg_cer= 0.576923072\n",
+ "Epoch: 0625 avg_cer= 0.596153855\n",
+ "Epoch: 0626 avg_cer= 0.596153855\n",
+ "Epoch: 0627 avg_cer= 0.576923072\n",
+ "Epoch: 0628 avg_cer= 0.576923072\n",
+ "Epoch: 0629 avg_cer= 0.576923072\n",
+ "Epoch: 0630 avg_cer= 0.576923072\n",
+ "Epoch: 0631 avg_cer= 0.576923072\n",
+ "Epoch: 0632 avg_cer= 0.596153855\n",
+ "Epoch: 0633 avg_cer= 0.596153855\n",
+ "Epoch: 0634 avg_cer= 0.596153855\n",
+ "Epoch: 0635 avg_cer= 0.557692289\n",
+ "Epoch: 0636 avg_cer= 0.557692289\n",
+ "Epoch: 0637 avg_cer= 0.596153855\n",
+ "Epoch: 0638 avg_cer= 0.576923072\n",
+ "Epoch: 0639 avg_cer= 0.576923072\n",
+ "Epoch: 0640 avg_cer= 0.557692289\n",
+ "Epoch: 0641 avg_cer= 0.557692289\n",
+ "Epoch: 0642 avg_cer= 0.557692289\n",
+ "Epoch: 0643 avg_cer= 0.576923072\n",
+ "Epoch: 0644 avg_cer= 0.538461566\n",
+ "Epoch: 0645 avg_cer= 0.576923072\n",
+ "Epoch: 0646 avg_cer= 0.596153855\n",
+ "Epoch: 0647 avg_cer= 0.596153855\n",
+ "Epoch: 0648 avg_cer= 0.576923072\n",
+ "Epoch: 0649 avg_cer= 0.576923072\n",
+ "Epoch: 0650 avg_cer= 0.576923072\n",
+ "Epoch: 0651 avg_cer= 0.576923072\n",
+ "Epoch: 0652 avg_cer= 0.615384638\n",
+ "Epoch: 0653 avg_cer= 0.615384638\n",
+ "Epoch: 0654 avg_cer= 0.596153855\n",
+ "Epoch: 0655 avg_cer= 0.538461566\n",
+ "Epoch: 0656 avg_cer= 0.557692289\n",
+ "Epoch: 0657 avg_cer= 0.557692289\n",
+ "Epoch: 0658 avg_cer= 0.576923072\n",
+ "Epoch: 0659 avg_cer= 0.576923072\n",
+ "Epoch: 0660 avg_cer= 0.576923072\n",
+ "Epoch: 0661 avg_cer= 0.557692289\n",
+ "Epoch: 0662 avg_cer= 0.519230783\n",
+ "Epoch: 0663 avg_cer= 0.500000000\n",
+ "Epoch: 0664 avg_cer= 0.557692289\n",
+ "Epoch: 0665 avg_cer= 0.576923072\n",
+ "Epoch: 0666 avg_cer= 0.576923072\n",
+ "Epoch: 0667 avg_cer= 0.557692289\n",
+ "Epoch: 0668 avg_cer= 0.557692289\n",
+ "Epoch: 0669 avg_cer= 0.557692289\n",
+ "Epoch: 0670 avg_cer= 0.557692289\n",
+ "Epoch: 0671 avg_cer= 0.557692289\n",
+ "Epoch: 0672 avg_cer= 0.557692289\n",
+ "Epoch: 0673 avg_cer= 0.557692289\n",
+ "Epoch: 0674 avg_cer= 0.538461566\n",
+ "Epoch: 0675 avg_cer= 0.538461566\n",
+ "Epoch: 0676 avg_cer= 0.538461566\n",
+ "Epoch: 0677 avg_cer= 0.538461566\n",
+ "Epoch: 0678 avg_cer= 0.480769217\n",
+ "Epoch: 0679 avg_cer= 0.519230783\n",
+ "Epoch: 0680 avg_cer= 0.538461566\n",
+ "Epoch: 0681 avg_cer= 0.538461566\n",
+ "Epoch: 0682 avg_cer= 0.596153855\n",
+ "Epoch: 0683 avg_cer= 0.538461566\n",
+ "Epoch: 0684 avg_cer= 0.538461566\n",
+ "Epoch: 0685 avg_cer= 0.903846145\n",
+ "Epoch: 0686 avg_cer= 0.538461566\n",
+ "Epoch: 0687 avg_cer= 0.788461566\n",
+ "Epoch: 0688 avg_cer= 0.576923072\n",
+ "Epoch: 0689 avg_cer= 0.576923072\n",
+ "Epoch: 0690 avg_cer= 1.653846145\n",
+ "Epoch: 0691 avg_cer= 0.634615362\n",
+ "Epoch: 0692 avg_cer= 0.769230783\n",
+ "Epoch: 0693 avg_cer= 0.769230783\n",
+ "Epoch: 0694 avg_cer= 0.730769217\n",
+ "Epoch: 0695 avg_cer= 0.615384638\n",
+ "Epoch: 0696 avg_cer= 0.596153855\n",
+ "Epoch: 0697 avg_cer= 0.596153855\n",
+ "Epoch: 0698 avg_cer= 0.653846145\n",
+ "Epoch: 0699 avg_cer= 0.596153855\n",
+ "Epoch: 0700 avg_cer= 0.615384638\n",
+ "Epoch: 0701 avg_cer= 0.557692289\n",
+ "Epoch: 0702 avg_cer= 0.538461566\n",
+ "Epoch: 0703 avg_cer= 0.538461566\n",
+ "Epoch: 0704 avg_cer= 0.519230783\n",
+ "Epoch: 0705 avg_cer= 0.576923072\n",
+ "Epoch: 0706 avg_cer= 0.519230783\n",
+ "Epoch: 0707 avg_cer= 0.480769217\n",
+ "Epoch: 0708 avg_cer= 0.500000000\n",
+ "Epoch: 0709 avg_cer= 0.500000000\n",
+ "Epoch: 0710 avg_cer= 0.538461566\n",
+ "Epoch: 0711 avg_cer= 0.576923072\n",
+ "Epoch: 0712 avg_cer= 0.557692289\n",
+ "Epoch: 0713 avg_cer= 0.538461566\n",
+ "Epoch: 0714 avg_cer= 0.519230783\n",
+ "Epoch: 0715 avg_cer= 0.557692289\n",
+ "Epoch: 0716 avg_cer= 0.576923072\n",
+ "Epoch: 0717 avg_cer= 0.442307681\n",
+ "Epoch: 0718 avg_cer= 0.442307681\n",
+ "Epoch: 0719 avg_cer= 0.461538464\n",
+ "Epoch: 0720 avg_cer= 0.461538464\n",
+ "Epoch: 0721 avg_cer= 0.519230783\n",
+ "Epoch: 0722 avg_cer= 0.500000000\n",
+ "Epoch: 0723 avg_cer= 0.480769217\n",
+ "Epoch: 0724 avg_cer= 0.461538464\n",
+ "Epoch: 0725 avg_cer= 0.500000000\n",
+ "Epoch: 0726 avg_cer= 0.500000000\n",
+ "Epoch: 0727 avg_cer= 0.538461566\n",
+ "Epoch: 0728 avg_cer= 0.538461566\n",
+ "Epoch: 0729 avg_cer= 0.557692289\n",
+ "Epoch: 0730 avg_cer= 0.500000000\n",
+ "Epoch: 0731 avg_cer= 0.538461566\n",
+ "Epoch: 0732 avg_cer= 0.519230783\n",
+ "Epoch: 0733 avg_cer= 0.500000000\n",
+ "Epoch: 0734 avg_cer= 0.480769217\n",
+ "Epoch: 0735 avg_cer= 0.500000000\n",
+ "Epoch: 0736 avg_cer= 0.519230783\n",
+ "Epoch: 0737 avg_cer= 0.500000000\n",
+ "Epoch: 0738 avg_cer= 0.519230783\n",
+ "Epoch: 0739 avg_cer= 0.500000000\n",
+ "Epoch: 0740 avg_cer= 0.519230783\n",
+ "Epoch: 0741 avg_cer= 0.480769217\n",
+ "Epoch: 0742 avg_cer= 0.461538464\n",
+ "Epoch: 0743 avg_cer= 0.500000000\n",
+ "Epoch: 0744 avg_cer= 0.500000000\n",
+ "Epoch: 0745 avg_cer= 0.500000000\n",
+ "Epoch: 0746 avg_cer= 0.500000000\n",
+ "Epoch: 0747 avg_cer= 0.519230783\n",
+ "Epoch: 0748 avg_cer= 0.500000000\n",
+ "Epoch: 0749 avg_cer= 0.500000000\n",
+ "Epoch: 0750 avg_cer= 0.500000000\n",
+ "Epoch: 0751 avg_cer= 0.480769217\n",
+ "Epoch: 0752 avg_cer= 0.480769217\n",
+ "Epoch: 0753 avg_cer= 0.480769217\n",
+ "Epoch: 0754 avg_cer= 0.480769217\n",
+ "Epoch: 0755 avg_cer= 0.519230783\n",
+ "Epoch: 0756 avg_cer= 0.500000000\n",
+ "Epoch: 0757 avg_cer= 0.500000000\n",
+ "Epoch: 0758 avg_cer= 0.480769217\n",
+ "Epoch: 0759 avg_cer= 0.480769217\n",
+ "Epoch: 0760 avg_cer= 0.480769217\n",
+ "Epoch: 0761 avg_cer= 0.480769217\n",
+ "Epoch: 0762 avg_cer= 0.480769217\n",
+ "Epoch: 0763 avg_cer= 0.461538464\n",
+ "Epoch: 0764 avg_cer= 0.480769217\n",
+ "Epoch: 0765 avg_cer= 0.480769217\n",
+ "Epoch: 0766 avg_cer= 0.461538464\n",
+ "Epoch: 0767 avg_cer= 0.500000000\n",
+ "Epoch: 0768 avg_cer= 0.480769217\n",
+ "Epoch: 0769 avg_cer= 0.500000000\n",
+ "Epoch: 0770 avg_cer= 0.480769217\n",
+ "Epoch: 0771 avg_cer= 0.480769217\n",
+ "Epoch: 0772 avg_cer= 0.461538464\n",
+ "Epoch: 0773 avg_cer= 0.461538464\n",
+ "Epoch: 0774 avg_cer= 0.480769217\n",
+ "Epoch: 0775 avg_cer= 0.480769217\n",
+ "Epoch: 0776 avg_cer= 0.480769217\n",
+ "Epoch: 0777 avg_cer= 0.480769217\n",
+ "Epoch: 0778 avg_cer= 0.480769217\n",
+ "Epoch: 0779 avg_cer= 0.480769217\n",
+ "Epoch: 0780 avg_cer= 0.461538464\n",
+ "Epoch: 0781 avg_cer= 0.480769217\n",
+ "Epoch: 0782 avg_cer= 0.480769217\n",
+ "Epoch: 0783 avg_cer= 0.480769217\n",
+ "Epoch: 0784 avg_cer= 0.480769217\n",
+ "Epoch: 0785 avg_cer= 0.480769217\n",
+ "Epoch: 0786 avg_cer= 0.480769217\n",
+ "Epoch: 0787 avg_cer= 0.461538464\n",
+ "Epoch: 0788 avg_cer= 0.461538464\n",
+ "Epoch: 0789 avg_cer= 0.461538464\n",
+ "Epoch: 0790 avg_cer= 0.480769217\n",
+ "Epoch: 0791 avg_cer= 0.500000000\n",
+ "Epoch: 0792 avg_cer= 0.500000000\n",
+ "Epoch: 0793 avg_cer= 0.480769217\n",
+ "Epoch: 0794 avg_cer= 0.480769217\n",
+ "Epoch: 0795 avg_cer= 0.500000000\n",
+ "Epoch: 0796 avg_cer= 0.461538464\n",
+ "Epoch: 0797 avg_cer= 0.500000000\n",
+ "Epoch: 0798 avg_cer= 0.461538464\n",
+ "Epoch: 0799 avg_cer= 0.461538464\n",
+ "Epoch: 0800 avg_cer= 0.442307681\n",
+ "Epoch: 0801 avg_cer= 0.461538464\n",
+ "Epoch: 0802 avg_cer= 0.461538464\n",
+ "Epoch: 0803 avg_cer= 0.461538464\n",
+ "Epoch: 0804 avg_cer= 0.461538464\n",
+ "Epoch: 0805 avg_cer= 0.423076928\n",
+ "Epoch: 0806 avg_cer= 0.461538464\n",
+ "Epoch: 0807 avg_cer= 0.423076928\n",
+ "Epoch: 0808 avg_cer= 0.442307681\n",
+ "Epoch: 0809 avg_cer= 0.461538464\n",
+ "Epoch: 0810 avg_cer= 0.461538464\n",
+ "Epoch: 0811 avg_cer= 0.461538464\n",
+ "Epoch: 0812 avg_cer= 0.442307681\n",
+ "Epoch: 0813 avg_cer= 0.403846145\n",
+ "Epoch: 0814 avg_cer= 0.403846145\n",
+ "Epoch: 0815 avg_cer= 0.403846145\n",
+ "Epoch: 0816 avg_cer= 0.423076928\n",
+ "Epoch: 0817 avg_cer= 0.403846145\n",
+ "Epoch: 0818 avg_cer= 0.403846145\n",
+ "Epoch: 0819 avg_cer= 0.384615391\n",
+ "Epoch: 0820 avg_cer= 0.403846145\n",
+ "Epoch: 0821 avg_cer= 0.403846145\n",
+ "Epoch: 0822 avg_cer= 0.384615391\n",
+ "Epoch: 0823 avg_cer= 0.403846145\n",
+ "Epoch: 0824 avg_cer= 0.403846145\n",
+ "Epoch: 0825 avg_cer= 0.403846145\n",
+ "Epoch: 0826 avg_cer= 0.403846145\n",
+ "Epoch: 0827 avg_cer= 0.384615391\n",
+ "Epoch: 0828 avg_cer= 0.403846145\n",
+ "Epoch: 0829 avg_cer= 0.423076928\n",
+ "Epoch: 0830 avg_cer= 0.403846145\n",
+ "Epoch: 0831 avg_cer= 0.403846145\n",
+ "Epoch: 0832 avg_cer= 0.442307681\n",
+ "Epoch: 0833 avg_cer= 0.423076928\n",
+ "Epoch: 0834 avg_cer= 0.442307681\n",
+ "Epoch: 0835 avg_cer= 0.442307681\n",
+ "Epoch: 0836 avg_cer= 0.384615391\n",
+ "Epoch: 0837 avg_cer= 0.403846145\n",
+ "Epoch: 0838 avg_cer= 0.384615391\n",
+ "Epoch: 0839 avg_cer= 0.365384609\n",
+ "Epoch: 0840 avg_cer= 0.365384609\n",
+ "Epoch: 0841 avg_cer= 0.384615391\n",
+ "Epoch: 0842 avg_cer= 0.365384609\n",
+ "Epoch: 0843 avg_cer= 0.403846145\n",
+ "Epoch: 0844 avg_cer= 0.365384609\n",
+ "Epoch: 0845 avg_cer= 0.365384609\n",
+ "Epoch: 0846 avg_cer= 0.365384609\n",
+ "Epoch: 0847 avg_cer= 0.365384609\n",
+ "Epoch: 0848 avg_cer= 0.365384609\n",
+ "Epoch: 0849 avg_cer= 0.346153855\n",
+ "Epoch: 0850 avg_cer= 0.365384609\n",
+ "Epoch: 0851 avg_cer= 0.384615391\n",
+ "Epoch: 0852 avg_cer= 0.365384609\n",
+ "Epoch: 0853 avg_cer= 0.326923072\n",
+ "Epoch: 0854 avg_cer= 0.365384609\n",
+ "Epoch: 0855 avg_cer= 0.346153855\n",
+ "Epoch: 0856 avg_cer= 0.326923072\n",
+ "Epoch: 0857 avg_cer= 0.346153855\n",
+ "Epoch: 0858 avg_cer= 0.346153855\n",
+ "Epoch: 0859 avg_cer= 0.326923072\n",
+ "Epoch: 0860 avg_cer= 0.326923072\n",
+ "Epoch: 0861 avg_cer= 0.307692319\n",
+ "Epoch: 0862 avg_cer= 0.307692319\n",
+ "Epoch: 0863 avg_cer= 0.307692319\n",
+ "Epoch: 0864 avg_cer= 0.326923072\n",
+ "Epoch: 0865 avg_cer= 0.326923072\n",
+ "Epoch: 0866 avg_cer= 0.307692319\n",
+ "Epoch: 0867 avg_cer= 0.307692319\n",
+ "Epoch: 0868 avg_cer= 0.307692319\n",
+ "Epoch: 0869 avg_cer= 0.307692319\n",
+ "Epoch: 0870 avg_cer= 0.307692319\n",
+ "Epoch: 0871 avg_cer= 0.307692319\n",
+ "Epoch: 0872 avg_cer= 0.307692319\n",
+ "Epoch: 0873 avg_cer= 0.307692319\n",
+ "Epoch: 0874 avg_cer= 0.288461536\n",
+ "Epoch: 0875 avg_cer= 0.307692319\n",
+ "Epoch: 0876 avg_cer= 0.307692319\n",
+ "Epoch: 0877 avg_cer= 0.288461536\n",
+ "Epoch: 0878 avg_cer= 0.307692319\n",
+ "Epoch: 0879 avg_cer= 0.307692319\n",
+ "Epoch: 0880 avg_cer= 0.307692319\n",
+ "Epoch: 0881 avg_cer= 0.307692319\n",
+ "Epoch: 0882 avg_cer= 0.307692319\n",
+ "Epoch: 0883 avg_cer= 0.307692319\n",
+ "Epoch: 0884 avg_cer= 0.307692319\n",
+ "Epoch: 0885 avg_cer= 0.307692319\n",
+ "Epoch: 0886 avg_cer= 0.403846145\n",
+ "Epoch: 0887 avg_cer= 0.384615391\n",
+ "Epoch: 0888 avg_cer= 0.403846145\n",
+ "Epoch: 0889 avg_cer= 0.403846145\n",
+ "Epoch: 0890 avg_cer= 0.365384609\n",
+ "Epoch: 0891 avg_cer= 0.403846145\n",
+ "Epoch: 0892 avg_cer= 0.326923072\n",
+ "Epoch: 0893 avg_cer= 0.307692319\n",
+ "Epoch: 0894 avg_cer= 0.403846145\n",
+ "Epoch: 0895 avg_cer= 0.326923072\n",
+ "Epoch: 0896 avg_cer= 0.346153855\n",
+ "Epoch: 0897 avg_cer= 0.307692319\n",
+ "Epoch: 0898 avg_cer= 0.346153855\n",
+ "Epoch: 0899 avg_cer= 0.365384609\n",
+ "Epoch: 0900 avg_cer= 0.365384609\n",
+ "Epoch: 0901 avg_cer= 0.269230783\n",
+ "Epoch: 0902 avg_cer= 0.326923072\n",
+ "Epoch: 0903 avg_cer= 0.269230783\n",
+ "Epoch: 0904 avg_cer= 0.288461536\n",
+ "Epoch: 0905 avg_cer= 0.288461536\n",
+ "Epoch: 0906 avg_cer= 0.269230783\n",
+ "Epoch: 0907 avg_cer= 0.269230783\n",
+ "Epoch: 0908 avg_cer= 0.288461536\n",
+ "Epoch: 0909 avg_cer= 0.288461536\n",
+ "Epoch: 0910 avg_cer= 0.288461536\n",
+ "Epoch: 0911 avg_cer= 0.269230783\n",
+ "Epoch: 0912 avg_cer= 0.288461536\n",
+ "Epoch: 0913 avg_cer= 0.288461536\n",
+ "Epoch: 0914 avg_cer= 0.269230783\n",
+ "Epoch: 0915 avg_cer= 0.288461536\n",
+ "Epoch: 0916 avg_cer= 0.269230783\n",
+ "Epoch: 0917 avg_cer= 0.288461536\n",
+ "Epoch: 0918 avg_cer= 0.269230783\n",
+ "Epoch: 0919 avg_cer= 0.288461536\n",
+ "Epoch: 0920 avg_cer= 0.288461536\n",
+ "Epoch: 0921 avg_cer= 0.269230783\n",
+ "Epoch: 0922 avg_cer= 0.288461536\n",
+ "Epoch: 0923 avg_cer= 0.288461536\n",
+ "Epoch: 0924 avg_cer= 0.269230783\n",
+ "Epoch: 0925 avg_cer= 0.288461536\n",
+ "Epoch: 0926 avg_cer= 0.269230783\n",
+ "Epoch: 0927 avg_cer= 0.288461536\n",
+ "Epoch: 0928 avg_cer= 0.269230783\n",
+ "Epoch: 0929 avg_cer= 0.288461536\n",
+ "Epoch: 0930 avg_cer= 0.288461536\n",
+ "Epoch: 0931 avg_cer= 0.288461536\n",
+ "Epoch: 0932 avg_cer= 0.288461536\n",
+ "Epoch: 0933 avg_cer= 0.288461536\n",
+ "Epoch: 0934 avg_cer= 0.326923072\n",
+ "Epoch: 0935 avg_cer= 0.288461536\n",
+ "Epoch: 0936 avg_cer= 0.346153855\n",
+ "Epoch: 0937 avg_cer= 0.326923072\n",
+ "Epoch: 0938 avg_cer= 0.269230783\n",
+ "Epoch: 0939 avg_cer= 0.365384609\n",
+ "Epoch: 0940 avg_cer= 0.384615391\n",
+ "Epoch: 0941 avg_cer= 0.307692319\n",
+ "Epoch: 0942 avg_cer= 0.423076928\n",
+ "Epoch: 0943 avg_cer= 0.346153855\n",
+ "Epoch: 0944 avg_cer= 0.384615391\n",
+ "Epoch: 0945 avg_cer= 0.403846145\n",
+ "Epoch: 0946 avg_cer= 0.365384609\n",
+ "Epoch: 0947 avg_cer= 0.326923072\n",
+ "Epoch: 0948 avg_cer= 0.346153855\n",
+ "Epoch: 0949 avg_cer= 0.288461536\n",
+ "Epoch: 0950 avg_cer= 0.307692319\n",
+ "Epoch: 0951 avg_cer= 0.365384609\n",
+ "Epoch: 0952 avg_cer= 0.326923072\n",
+ "Epoch: 0953 avg_cer= 0.346153855\n",
+ "Epoch: 0954 avg_cer= 0.326923072\n",
+ "Epoch: 0955 avg_cer= 0.326923072\n",
+ "Epoch: 0956 avg_cer= 0.307692319\n",
+ "Epoch: 0957 avg_cer= 0.307692319\n",
+ "Epoch: 0958 avg_cer= 0.307692319\n",
+ "Epoch: 0959 avg_cer= 0.346153855\n",
+ "Epoch: 0960 avg_cer= 0.307692319\n",
+ "Epoch: 0961 avg_cer= 0.288461536\n",
+ "Epoch: 0962 avg_cer= 0.307692319\n",
+ "Epoch: 0963 avg_cer= 0.326923072\n",
+ "Epoch: 0964 avg_cer= 0.307692319\n",
+ "Epoch: 0965 avg_cer= 0.269230783\n",
+ "Epoch: 0966 avg_cer= 0.288461536\n",
+ "Epoch: 0967 avg_cer= 0.269230783\n",
+ "Epoch: 0968 avg_cer= 0.288461536\n",
+ "Epoch: 0969 avg_cer= 0.250000000\n",
+ "Epoch: 0970 avg_cer= 0.269230783\n",
+ "Epoch: 0971 avg_cer= 0.288461536\n",
+ "Epoch: 0972 avg_cer= 0.269230783\n",
+ "Epoch: 0973 avg_cer= 0.269230783\n",
+ "Epoch: 0974 avg_cer= 0.288461536\n",
+ "Epoch: 0975 avg_cer= 0.269230783\n",
+ "Epoch: 0976 avg_cer= 0.288461536\n",
+ "Epoch: 0977 avg_cer= 0.250000000\n",
+ "Epoch: 0978 avg_cer= 0.250000000\n",
+ "Epoch: 0979 avg_cer= 0.250000000\n",
+ "Epoch: 0980 avg_cer= 0.250000000\n",
+ "Epoch: 0981 avg_cer= 0.269230783\n",
+ "Epoch: 0982 avg_cer= 0.269230783\n",
+ "Epoch: 0983 avg_cer= 0.269230783\n",
+ "Epoch: 0984 avg_cer= 0.269230783\n",
+ "Epoch: 0985 avg_cer= 0.269230783\n",
+ "Epoch: 0986 avg_cer= 0.269230783\n",
+ "Epoch: 0987 avg_cer= 0.269230783\n",
+ "Epoch: 0988 avg_cer= 0.269230783\n",
+ "Epoch: 0989 avg_cer= 0.269230783\n",
+ "Epoch: 0990 avg_cer= 0.250000000\n",
+ "Epoch: 0991 avg_cer= 0.250000000\n",
+ "Epoch: 0992 avg_cer= 0.250000000\n",
+ "Epoch: 0993 avg_cer= 0.250000000\n",
+ "Epoch: 0994 avg_cer= 0.250000000\n",
+ "Epoch: 0995 avg_cer= 0.250000000\n",
+ "Epoch: 0996 avg_cer= 0.250000000\n",
+ "Epoch: 0997 avg_cer= 0.230769232\n",
+ "Epoch: 0998 avg_cer= 0.230769232\n",
+ "Epoch: 0999 avg_cer= 0.230769232\n",
+ "Epoch: 1000 avg_cer= 0.230769232\n",
+ "Epoch: 1001 avg_cer= 0.230769232\n",
+ "Epoch: 1002 avg_cer= 0.250000000\n",
+ "Epoch: 1003 avg_cer= 0.230769232\n",
+ "Epoch: 1004 avg_cer= 0.250000000\n",
+ "Epoch: 1005 avg_cer= 0.250000000\n",
+ "Epoch: 1006 avg_cer= 0.230769232\n",
+ "Epoch: 1007 avg_cer= 0.250000000\n",
+ "Epoch: 1008 avg_cer= 0.230769232\n",
+ "Epoch: 1009 avg_cer= 0.250000000\n",
+ "Epoch: 1010 avg_cer= 0.250000000\n",
+ "Epoch: 1011 avg_cer= 0.230769232\n",
+ "Epoch: 1012 avg_cer= 0.230769232\n",
+ "Epoch: 1013 avg_cer= 0.230769232\n",
+ "Epoch: 1014 avg_cer= 0.230769232\n",
+ "Epoch: 1015 avg_cer= 0.230769232\n",
+ "Epoch: 1016 avg_cer= 0.211538464\n",
+ "Epoch: 1017 avg_cer= 0.211538464\n",
+ "Epoch: 1018 avg_cer= 0.230769232\n",
+ "Epoch: 1019 avg_cer= 0.211538464\n",
+ "Epoch: 1020 avg_cer= 0.211538464\n",
+ "Epoch: 1021 avg_cer= 0.211538464\n",
+ "Epoch: 1022 avg_cer= 0.211538464\n",
+ "Epoch: 1023 avg_cer= 0.211538464\n",
+ "Epoch: 1024 avg_cer= 0.211538464\n",
+ "Epoch: 1025 avg_cer= 0.211538464\n",
+ "Epoch: 1026 avg_cer= 0.211538464\n",
+ "Epoch: 1027 avg_cer= 0.211538464\n",
+ "Epoch: 1028 avg_cer= 0.211538464\n",
+ "Epoch: 1029 avg_cer= 0.230769232\n",
+ "Epoch: 1030 avg_cer= 0.211538464\n",
+ "Epoch: 1031 avg_cer= 0.211538464\n",
+ "Epoch: 1032 avg_cer= 0.211538464\n",
+ "Epoch: 1033 avg_cer= 0.211538464\n",
+ "Epoch: 1034 avg_cer= 0.211538464\n",
+ "Epoch: 1035 avg_cer= 0.211538464\n",
+ "Epoch: 1036 avg_cer= 0.211538464\n",
+ "Epoch: 1037 avg_cer= 0.211538464\n",
+ "Epoch: 1038 avg_cer= 0.211538464\n",
+ "Epoch: 1039 avg_cer= 0.211538464\n",
+ "Epoch: 1040 avg_cer= 0.211538464\n",
+ "Epoch: 1041 avg_cer= 0.211538464\n",
+ "Epoch: 1042 avg_cer= 0.211538464\n",
+ "Epoch: 1043 avg_cer= 0.211538464\n",
+ "Epoch: 1044 avg_cer= 0.211538464\n",
+ "Epoch: 1045 avg_cer= 0.211538464\n",
+ "Epoch: 1046 avg_cer= 0.211538464\n",
+ "Epoch: 1047 avg_cer= 0.211538464\n",
+ "Epoch: 1048 avg_cer= 0.211538464\n",
+ "Epoch: 1049 avg_cer= 0.211538464\n",
+ "Epoch: 1050 avg_cer= 0.211538464\n",
+ "Epoch: 1051 avg_cer= 0.211538464\n",
+ "Epoch: 1052 avg_cer= 0.211538464\n",
+ "Epoch: 1053 avg_cer= 0.211538464\n",
+ "Epoch: 1054 avg_cer= 0.211538464\n",
+ "Epoch: 1055 avg_cer= 0.211538464\n",
+ "Epoch: 1056 avg_cer= 0.211538464\n",
+ "Epoch: 1057 avg_cer= 0.211538464\n",
+ "Epoch: 1058 avg_cer= 0.211538464\n",
+ "Epoch: 1059 avg_cer= 0.211538464\n",
+ "Epoch: 1060 avg_cer= 0.230769232\n",
+ "Epoch: 1061 avg_cer= 0.211538464\n",
+ "Epoch: 1062 avg_cer= 0.211538464\n",
+ "Epoch: 1063 avg_cer= 0.211538464\n",
+ "Epoch: 1064 avg_cer= 0.230769232\n",
+ "Epoch: 1065 avg_cer= 0.211538464\n",
+ "Epoch: 1066 avg_cer= 0.211538464\n",
+ "Epoch: 1067 avg_cer= 0.211538464\n",
+ "Epoch: 1068 avg_cer= 0.211538464\n",
+ "Epoch: 1069 avg_cer= 0.211538464\n",
+ "Epoch: 1070 avg_cer= 0.192307696\n",
+ "Epoch: 1071 avg_cer= 0.192307696\n",
+ "Epoch: 1072 avg_cer= 0.192307696\n",
+ "Epoch: 1073 avg_cer= 0.211538464\n",
+ "Epoch: 1074 avg_cer= 0.211538464\n",
+ "Epoch: 1075 avg_cer= 0.211538464\n",
+ "Epoch: 1076 avg_cer= 0.211538464\n",
+ "Epoch: 1077 avg_cer= 0.192307696\n",
+ "Epoch: 1078 avg_cer= 0.192307696\n",
+ "Epoch: 1079 avg_cer= 0.192307696\n",
+ "Epoch: 1080 avg_cer= 0.192307696\n",
+ "Epoch: 1081 avg_cer= 0.192307696\n",
+ "Epoch: 1082 avg_cer= 0.173076928\n",
+ "Epoch: 1083 avg_cer= 0.153846160\n",
+ "Epoch: 1084 avg_cer= 0.153846160\n",
+ "Epoch: 1085 avg_cer= 0.173076928\n",
+ "Epoch: 1086 avg_cer= 0.153846160\n",
+ "Epoch: 1087 avg_cer= 0.173076928\n",
+ "Epoch: 1088 avg_cer= 0.211538464\n",
+ "Epoch: 1089 avg_cer= 0.211538464\n",
+ "Epoch: 1090 avg_cer= 0.211538464\n",
+ "Epoch: 1091 avg_cer= 0.211538464\n",
+ "Epoch: 1092 avg_cer= 0.211538464\n",
+ "Epoch: 1093 avg_cer= 0.192307696\n",
+ "Epoch: 1094 avg_cer= 0.192307696\n",
+ "Epoch: 1095 avg_cer= 0.153846160\n",
+ "Epoch: 1096 avg_cer= 0.173076928\n",
+ "Epoch: 1097 avg_cer= 0.173076928\n",
+ "Epoch: 1098 avg_cer= 0.153846160\n",
+ "Epoch: 1099 avg_cer= 0.153846160\n",
+ "Epoch: 1100 avg_cer= 0.153846160\n",
+ "Epoch: 1101 avg_cer= 0.153846160\n",
+ "Epoch: 1102 avg_cer= 0.173076928\n",
+ "Epoch: 1103 avg_cer= 0.153846160\n",
+ "Epoch: 1104 avg_cer= 0.115384616\n",
+ "Epoch: 1105 avg_cer= 0.134615391\n",
+ "Epoch: 1106 avg_cer= 0.173076928\n",
+ "Epoch: 1107 avg_cer= 0.153846160\n",
+ "Epoch: 1108 avg_cer= 0.173076928\n",
+ "Epoch: 1109 avg_cer= 0.173076928\n",
+ "Epoch: 1110 avg_cer= 0.173076928\n",
+ "Epoch: 1111 avg_cer= 0.134615391\n",
+ "Epoch: 1112 avg_cer= 0.134615391\n",
+ "Epoch: 1113 avg_cer= 0.134615391\n",
+ "Epoch: 1114 avg_cer= 0.115384616\n",
+ "Epoch: 1115 avg_cer= 0.115384616\n",
+ "Epoch: 1116 avg_cer= 0.096153848\n",
+ "Epoch: 1117 avg_cer= 0.096153848\n",
+ "Epoch: 1118 avg_cer= 0.134615391\n",
+ "Epoch: 1119 avg_cer= 0.134615391\n",
+ "Epoch: 1120 avg_cer= 0.134615391\n",
+ "Epoch: 1121 avg_cer= 0.134615391\n",
+ "Epoch: 1122 avg_cer= 0.115384616\n",
+ "Epoch: 1123 avg_cer= 0.096153848\n",
+ "Epoch: 1124 avg_cer= 0.115384616\n",
+ "Epoch: 1125 avg_cer= 0.096153848\n",
+ "Epoch: 1126 avg_cer= 0.096153848\n",
+ "Epoch: 1127 avg_cer= 0.096153848\n",
+ "Epoch: 1128 avg_cer= 0.096153848\n",
+ "Epoch: 1129 avg_cer= 0.096153848\n",
+ "Epoch: 1130 avg_cer= 0.096153848\n",
+ "Epoch: 1131 avg_cer= 0.096153848\n",
+ "Epoch: 1132 avg_cer= 0.096153848\n",
+ "Epoch: 1133 avg_cer= 0.076923080\n",
+ "Epoch: 1134 avg_cer= 0.076923080\n",
+ "Epoch: 1135 avg_cer= 0.057692308\n",
+ "Epoch: 1136 avg_cer= 0.057692308\n",
+ "Epoch: 1137 avg_cer= 0.057692308\n",
+ "Epoch: 1138 avg_cer= 0.057692308\n",
+ "Epoch: 1139 avg_cer= 0.076923080\n",
+ "Epoch: 1140 avg_cer= 0.076923080\n",
+ "Epoch: 1141 avg_cer= 0.076923080\n",
+ "Epoch: 1142 avg_cer= 0.076923080\n",
+ "Epoch: 1143 avg_cer= 0.076923080\n",
+ "Epoch: 1144 avg_cer= 0.057692308\n",
+ "Epoch: 1145 avg_cer= 0.057692308\n",
+ "Epoch: 1146 avg_cer= 0.076923080\n",
+ "Epoch: 1147 avg_cer= 0.076923080\n",
+ "Epoch: 1148 avg_cer= 0.076923080\n",
+ "Epoch: 1149 avg_cer= 0.076923080\n",
+ "Epoch: 1150 avg_cer= 0.057692308\n",
+ "Epoch: 1151 avg_cer= 0.057692308\n",
+ "Epoch: 1152 avg_cer= 0.057692308\n",
+ "Epoch: 1153 avg_cer= 0.057692308\n",
+ "Epoch: 1154 avg_cer= 0.057692308\n",
+ "Epoch: 1155 avg_cer= 0.038461540\n",
+ "Epoch: 1156 avg_cer= 0.038461540\n",
+ "Epoch: 1157 avg_cer= 0.038461540\n",
+ "Epoch: 1158 avg_cer= 0.019230770\n",
+ "Epoch: 1159 avg_cer= 0.019230770\n",
+ "Epoch: 1160 avg_cer= 0.019230770\n",
+ "Epoch: 1161 avg_cer= 0.019230770\n",
+ "Epoch: 1162 avg_cer= 0.038461540\n",
+ "Epoch: 1163 avg_cer= 0.057692308\n",
+ "Epoch: 1164 avg_cer= 0.038461540\n",
+ "Epoch: 1165 avg_cer= 0.038461540\n",
+ "Epoch: 1166 avg_cer= 0.057692308\n",
+ "Epoch: 1167 avg_cer= 0.038461540\n",
+ "Epoch: 1168 avg_cer= 0.038461540\n",
+ "Epoch: 1169 avg_cer= 0.038461540\n",
+ "Epoch: 1170 avg_cer= 0.038461540\n",
+ "Epoch: 1171 avg_cer= 0.038461540\n",
+ "Epoch: 1172 avg_cer= 0.038461540\n",
+ "Epoch: 1173 avg_cer= 0.038461540\n",
+ "Epoch: 1174 avg_cer= 0.038461540\n",
+ "Epoch: 1175 avg_cer= 0.019230770\n",
+ "Epoch: 1176 avg_cer= 0.038461540\n",
+ "Epoch: 1177 avg_cer= 0.019230770\n",
+ "Epoch: 1178 avg_cer= 0.019230770\n",
+ "Epoch: 1179 avg_cer= 0.038461540\n",
+ "Epoch: 1180 avg_cer= 0.019230770\n",
+ "Epoch: 1181 avg_cer= 0.038461540\n",
+ "Epoch: 1182 avg_cer= 0.038461540\n",
+ "Epoch: 1183 avg_cer= 0.038461540\n",
+ "Epoch: 1184 avg_cer= 0.038461540\n",
+ "Epoch: 1185 avg_cer= 0.019230770\n",
+ "Epoch: 1186 avg_cer= 0.019230770\n",
+ "Epoch: 1187 avg_cer= 0.019230770\n",
+ "Epoch: 1188 avg_cer= 0.019230770\n",
+ "Epoch: 1189 avg_cer= 0.019230770\n",
+ "Epoch: 1190 avg_cer= 0.038461540\n",
+ "Epoch: 1191 avg_cer= 0.038461540\n",
+ "Epoch: 1192 avg_cer= 0.038461540\n",
+ "Epoch: 1193 avg_cer= 0.038461540\n",
+ "Epoch: 1194 avg_cer= 0.038461540\n",
+ "Epoch: 1195 avg_cer= 0.057692308\n",
+ "Epoch: 1196 avg_cer= 0.019230770\n",
+ "Epoch: 1197 avg_cer= 0.038461540\n",
+ "Epoch: 1198 avg_cer= 0.038461540\n",
+ "Epoch: 1199 avg_cer= 0.019230770\n",
+ "Epoch: 1200 avg_cer= 0.019230770\n",
+ "Epoch: 1201 avg_cer= 0.019230770\n",
+ "Epoch: 1202 avg_cer= 0.019230770\n",
+ "Epoch: 1203 avg_cer= 0.019230770\n",
+ "Epoch: 1204 avg_cer= 0.038461540\n",
+ "Epoch: 1205 avg_cer= 0.057692308\n",
+ "Epoch: 1206 avg_cer= 0.057692308\n",
+ "Epoch: 1207 avg_cer= 0.057692308\n",
+ "Epoch: 1208 avg_cer= 0.038461540\n",
+ "Epoch: 1209 avg_cer= 0.038461540\n",
+ "Epoch: 1210 avg_cer= 0.038461540\n",
+ "Epoch: 1211 avg_cer= 0.019230770\n",
+ "Epoch: 1212 avg_cer= 0.019230770\n",
+ "Epoch: 1213 avg_cer= 0.019230770\n",
+ "Epoch: 1214 avg_cer= 0.019230770\n",
+ "Epoch: 1215 avg_cer= 0.019230770\n",
+ "Epoch: 1216 avg_cer= 0.019230770\n",
+ "Epoch: 1217 avg_cer= 0.019230770\n",
+ "Epoch: 1218 avg_cer= 0.019230770\n",
+ "Epoch: 1219 avg_cer= 0.019230770\n",
+ "Epoch: 1220 avg_cer= 0.019230770\n",
+ "Epoch: 1221 avg_cer= 0.019230770\n",
+ "Epoch: 1222 avg_cer= 0.019230770\n",
+ "Epoch: 1223 avg_cer= 0.019230770\n",
+ "Epoch: 1224 avg_cer= 0.019230770\n",
+ "Epoch: 1225 avg_cer= 0.019230770\n",
+ "Epoch: 1226 avg_cer= 0.019230770\n",
+ "Epoch: 1227 avg_cer= 0.019230770\n",
+ "Epoch: 1228 avg_cer= 0.019230770\n",
+ "Epoch: 1229 avg_cer= 0.019230770\n",
+ "Epoch: 1230 avg_cer= 0.019230770\n",
+ "Epoch: 1231 avg_cer= 0.019230770\n",
+ "Epoch: 1232 avg_cer= 0.019230770\n",
+ "Epoch: 1233 avg_cer= 0.019230770\n",
+ "Epoch: 1234 avg_cer= 0.019230770\n",
+ "Epoch: 1235 avg_cer= 0.019230770\n",
+ "Epoch: 1236 avg_cer= 0.019230770\n",
+ "Epoch: 1237 avg_cer= 0.019230770\n",
+ "Epoch: 1238 avg_cer= 0.019230770\n",
+ "Epoch: 1239 avg_cer= 0.019230770\n",
+ "Epoch: 1240 avg_cer= 0.019230770\n",
+ "Epoch: 1241 avg_cer= 0.019230770\n",
+ "Epoch: 1242 avg_cer= 0.019230770\n",
+ "Epoch: 1243 avg_cer= 0.019230770\n",
+ "Epoch: 1244 avg_cer= 0.019230770\n",
+ "Epoch: 1245 avg_cer= 0.019230770\n",
+ "Epoch: 1246 avg_cer= 0.019230770\n",
+ "Epoch: 1247 avg_cer= 0.019230770\n",
+ "Epoch: 1248 avg_cer= 0.019230770\n",
+ "Epoch: 1249 avg_cer= 0.019230770\n",
+ "Epoch: 1250 avg_cer= 0.019230770\n",
+ "Epoch: 1251 avg_cer= 0.019230770\n",
+ "Epoch: 1252 avg_cer= 0.019230770\n",
+ "Epoch: 1253 avg_cer= 0.019230770\n",
+ "Epoch: 1254 avg_cer= 0.019230770\n",
+ "Epoch: 1255 avg_cer= 0.019230770\n",
+ "Epoch: 1256 avg_cer= 0.019230770\n",
+ "Epoch: 1257 avg_cer= 0.019230770\n",
+ "Epoch: 1258 avg_cer= 0.019230770\n",
+ "Epoch: 1259 avg_cer= 0.038461540\n",
+ "Epoch: 1260 avg_cer= 0.019230770\n",
+ "Epoch: 1261 avg_cer= 0.019230770\n",
+ "Epoch: 1262 avg_cer= 0.019230770\n",
+ "Epoch: 1263 avg_cer= 0.019230770\n",
+ "Epoch: 1264 avg_cer= 0.019230770\n",
+ "Epoch: 1265 avg_cer= 0.038461540\n",
+ "Epoch: 1266 avg_cer= 0.038461540\n",
+ "Epoch: 1267 avg_cer= 0.038461540\n",
+ "Epoch: 1268 avg_cer= 0.038461540\n",
+ "Epoch: 1269 avg_cer= 0.038461540\n",
+ "Epoch: 1270 avg_cer= 0.019230770\n",
+ "Epoch: 1271 avg_cer= 0.019230770\n",
+ "Epoch: 1272 avg_cer= 0.019230770\n",
+ "Epoch: 1273 avg_cer= 0.019230770\n",
+ "Epoch: 1274 avg_cer= 0.019230770\n",
+ "Epoch: 1275 avg_cer= 0.019230770\n",
+ "Epoch: 1276 avg_cer= 0.019230770\n",
+ "Epoch: 1277 avg_cer= 0.019230770\n",
+ "Epoch: 1278 avg_cer= 0.019230770\n",
+ "Epoch: 1279 avg_cer= 0.038461540\n",
+ "Epoch: 1280 avg_cer= 0.038461540\n",
+ "Epoch: 1281 avg_cer= 0.038461540\n",
+ "Epoch: 1282 avg_cer= 0.038461540\n",
+ "Epoch: 1283 avg_cer= 0.038461540\n",
+ "Epoch: 1284 avg_cer= 0.038461540\n",
+ "Epoch: 1285 avg_cer= 0.019230770\n",
+ "Epoch: 1286 avg_cer= 0.019230770\n",
+ "Epoch: 1287 avg_cer= 0.019230770\n",
+ "Epoch: 1288 avg_cer= 0.019230770\n",
+ "Epoch: 1289 avg_cer= 0.019230770\n",
+ "Epoch: 1290 avg_cer= 0.019230770\n",
+ "Epoch: 1291 avg_cer= 0.019230770\n",
+ "Epoch: 1292 avg_cer= 0.019230770\n",
+ "Epoch: 1293 avg_cer= 0.019230770\n",
+ "Epoch: 1294 avg_cer= 0.019230770\n",
+ "Epoch: 1295 avg_cer= 0.019230770\n",
+ "Epoch: 1296 avg_cer= 0.038461540\n",
+ "Epoch: 1297 avg_cer= 0.038461540\n",
+ "Epoch: 1298 avg_cer= 0.019230770\n",
+ "Epoch: 1299 avg_cer= 0.019230770\n",
+ "Epoch: 1300 avg_cer= 0.019230770\n",
+ "Epoch: 1301 avg_cer= 0.019230770\n",
+ "Epoch: 1302 avg_cer= 0.019230770\n",
+ "Epoch: 1303 avg_cer= 0.019230770\n",
+ "Epoch: 1304 avg_cer= 0.019230770\n",
+ "Epoch: 1305 avg_cer= 0.019230770\n",
+ "Epoch: 1306 avg_cer= 0.019230770\n",
+ "Epoch: 1307 avg_cer= 0.019230770\n",
+ "Epoch: 1308 avg_cer= 0.019230770\n",
+ "Epoch: 1309 avg_cer= 0.019230770\n",
+ "Epoch: 1310 avg_cer= 0.019230770\n",
+ "Epoch: 1311 avg_cer= 0.019230770\n",
+ "Epoch: 1312 avg_cer= 0.019230770\n",
+ "Epoch: 1313 avg_cer= 0.019230770\n",
+ "Epoch: 1314 avg_cer= 0.019230770\n",
+ "Epoch: 1315 avg_cer= 0.019230770\n",
+ "Epoch: 1316 avg_cer= 0.019230770\n",
+ "Epoch: 1317 avg_cer= 0.019230770\n",
+ "Epoch: 1318 avg_cer= 0.019230770\n",
+ "Epoch: 1319 avg_cer= 0.019230770\n",
+ "Epoch: 1320 avg_cer= 0.019230770\n",
+ "Epoch: 1321 avg_cer= 0.019230770\n",
+ "Epoch: 1322 avg_cer= 0.019230770\n",
+ "Epoch: 1323 avg_cer= 0.019230770\n",
+ "Epoch: 1324 avg_cer= 0.057692308\n",
+ "Epoch: 1325 avg_cer= 0.019230770\n",
+ "Epoch: 1326 avg_cer= 0.019230770\n",
+ "Epoch: 1327 avg_cer= 0.019230770\n",
+ "Epoch: 1328 avg_cer= 0.019230770\n",
+ "Epoch: 1329 avg_cer= 0.019230770\n",
+ "Epoch: 1330 avg_cer= 0.019230770\n",
+ "Epoch: 1331 avg_cer= 0.019230770\n",
+ "Epoch: 1332 avg_cer= 0.019230770\n",
+ "Epoch: 1333 avg_cer= 0.019230770\n",
+ "Epoch: 1334 avg_cer= 0.019230770\n",
+ "Epoch: 1335 avg_cer= 0.019230770\n",
+ "Epoch: 1336 avg_cer= 0.019230770\n",
+ "Epoch: 1337 avg_cer= 0.019230770\n",
+ "Epoch: 1338 avg_cer= 0.019230770\n",
+ "Epoch: 1339 avg_cer= 0.019230770\n",
+ "Epoch: 1340 avg_cer= 0.019230770\n",
+ "Epoch: 1341 avg_cer= 0.019230770\n",
+ "Epoch: 1342 avg_cer= 0.019230770\n",
+ "Epoch: 1343 avg_cer= 0.019230770\n",
+ "Epoch: 1344 avg_cer= 0.019230770\n",
+ "Epoch: 1345 avg_cer= 0.019230770\n",
+ "Epoch: 1346 avg_cer= 0.019230770\n",
+ "Epoch: 1347 avg_cer= 0.019230770\n",
+ "Epoch: 1348 avg_cer= 0.019230770\n",
+ "Epoch: 1349 avg_cer= 0.019230770\n",
+ "Epoch: 1350 avg_cer= 0.019230770\n",
+ "Epoch: 1351 avg_cer= 0.019230770\n",
+ "Epoch: 1352 avg_cer= 0.019230770\n",
+ "Epoch: 1353 avg_cer= 0.019230770\n",
+ "Epoch: 1354 avg_cer= 0.019230770\n",
+ "Epoch: 1355 avg_cer= 0.019230770\n",
+ "Epoch: 1356 avg_cer= 0.019230770\n",
+ "Epoch: 1357 avg_cer= 0.019230770\n",
+ "Epoch: 1358 avg_cer= 0.019230770\n",
+ "Epoch: 1359 avg_cer= 0.019230770\n",
+ "Epoch: 1360 avg_cer= 0.019230770\n",
+ "Epoch: 1361 avg_cer= 0.019230770\n",
+ "Epoch: 1362 avg_cer= 0.019230770\n",
+ "Epoch: 1363 avg_cer= 0.019230770\n",
+ "Epoch: 1364 avg_cer= 0.019230770\n",
+ "Epoch: 1365 avg_cer= 0.019230770\n",
+ "Epoch: 1366 avg_cer= 0.019230770\n",
+ "Epoch: 1367 avg_cer= 0.019230770\n",
+ "Epoch: 1368 avg_cer= 0.019230770\n",
+ "Epoch: 1369 avg_cer= 0.019230770\n",
+ "Epoch: 1370 avg_cer= 0.019230770\n",
+ "Epoch: 1371 avg_cer= 0.019230770\n",
+ "Epoch: 1372 avg_cer= 0.019230770\n",
+ "Epoch: 1373 avg_cer= 0.019230770\n",
+ "Epoch: 1374 avg_cer= 0.019230770\n",
+ "Epoch: 1375 avg_cer= 0.019230770\n",
+ "Epoch: 1376 avg_cer= 0.019230770\n",
+ "Epoch: 1377 avg_cer= 0.019230770\n",
+ "Epoch: 1378 avg_cer= 0.019230770\n",
+ "Epoch: 1379 avg_cer= 0.019230770\n",
+ "Epoch: 1380 avg_cer= 0.019230770\n",
+ "Epoch: 1381 avg_cer= 0.019230770\n",
+ "Epoch: 1382 avg_cer= 0.019230770\n",
+ "Epoch: 1383 avg_cer= 0.019230770\n",
+ "Epoch: 1384 avg_cer= 0.019230770\n",
+ "Epoch: 1385 avg_cer= 0.019230770\n",
+ "Epoch: 1386 avg_cer= 0.019230770\n",
+ "Epoch: 1387 avg_cer= 0.019230770\n",
+ "Epoch: 1388 avg_cer= 0.019230770\n",
+ "Epoch: 1389 avg_cer= 0.019230770\n",
+ "Epoch: 1390 avg_cer= 0.019230770\n",
+ "Epoch: 1391 avg_cer= 0.019230770\n",
+ "Epoch: 1392 avg_cer= 0.019230770\n",
+ "Epoch: 1393 avg_cer= 0.019230770\n",
+ "Epoch: 1394 avg_cer= 0.019230770\n",
+ "Epoch: 1395 avg_cer= 0.019230770\n",
+ "Epoch: 1396 avg_cer= 0.019230770\n",
+ "Epoch: 1397 avg_cer= 0.019230770\n",
+ "Epoch: 1398 avg_cer= 0.019230770\n",
+ "Epoch: 1399 avg_cer= 0.019230770\n",
+ "Epoch: 1400 avg_cer= 0.019230770\n",
+ "Epoch: 1401 avg_cer= 0.019230770\n",
+ "Epoch: 1402 avg_cer= 0.019230770\n",
+ "Epoch: 1403 avg_cer= 0.019230770\n",
+ "Epoch: 1404 avg_cer= 0.019230770\n",
+ "Epoch: 1405 avg_cer= 0.019230770\n",
+ "Epoch: 1406 avg_cer= 0.019230770\n",
+ "Epoch: 1407 avg_cer= 0.019230770\n",
+ "Epoch: 1408 avg_cer= 0.019230770\n",
+ "Epoch: 1409 avg_cer= 0.019230770\n",
+ "Epoch: 1410 avg_cer= 0.019230770\n",
+ "Epoch: 1411 avg_cer= 0.019230770\n",
+ "Epoch: 1412 avg_cer= 0.019230770\n",
+ "Epoch: 1413 avg_cer= 0.019230770\n",
+ "Epoch: 1414 avg_cer= 0.019230770\n",
+ "Epoch: 1415 avg_cer= 0.019230770\n",
+ "Epoch: 1416 avg_cer= 0.019230770\n",
+ "Epoch: 1417 avg_cer= 0.019230770\n",
+ "Epoch: 1418 avg_cer= 0.019230770\n",
+ "Epoch: 1419 avg_cer= 0.019230770\n",
+ "Epoch: 1420 avg_cer= 0.019230770\n",
+ "Epoch: 1421 avg_cer= 0.019230770\n",
+ "Epoch: 1422 avg_cer= 0.019230770\n",
+ "Epoch: 1423 avg_cer= 0.019230770\n",
+ "Epoch: 1424 avg_cer= 0.019230770\n",
+ "Epoch: 1425 avg_cer= 0.019230770\n",
+ "Epoch: 1426 avg_cer= 0.019230770\n",
+ "Epoch: 1427 avg_cer= 0.019230770\n",
+ "Epoch: 1428 avg_cer= 0.019230770\n",
+ "Epoch: 1429 avg_cer= 0.019230770\n",
+ "Epoch: 1430 avg_cer= 0.019230770\n",
+ "Epoch: 1431 avg_cer= 0.019230770\n",
+ "Epoch: 1432 avg_cer= 0.019230770\n",
+ "Epoch: 1433 avg_cer= 0.019230770\n",
+ "Epoch: 1434 avg_cer= 0.019230770\n",
+ "Epoch: 1435 avg_cer= 0.019230770\n",
+ "Epoch: 1436 avg_cer= 0.019230770\n",
+ "Epoch: 1437 avg_cer= 0.019230770\n",
+ "Epoch: 1438 avg_cer= 0.019230770\n",
+ "Epoch: 1439 avg_cer= 0.019230770\n",
+ "Epoch: 1440 avg_cer= 0.019230770\n",
+ "Epoch: 1441 avg_cer= 0.019230770\n",
+ "Epoch: 1442 avg_cer= 0.019230770\n",
+ "Epoch: 1443 avg_cer= 0.019230770\n",
+ "Epoch: 1444 avg_cer= 0.019230770\n",
+ "Epoch: 1445 avg_cer= 0.019230770\n",
+ "Epoch: 1446 avg_cer= 0.019230770\n",
+ "Epoch: 1447 avg_cer= 0.019230770\n",
+ "Epoch: 1448 avg_cer= 0.019230770\n",
+ "Epoch: 1449 avg_cer= 0.019230770\n",
+ "Epoch: 1450 avg_cer= 0.019230770\n",
+ "Epoch: 1451 avg_cer= 0.019230770\n",
+ "Epoch: 1452 avg_cer= 0.019230770\n",
+ "Epoch: 1453 avg_cer= 0.019230770\n",
+ "Epoch: 1454 avg_cer= 0.019230770\n",
+ "Epoch: 1455 avg_cer= 0.019230770\n",
+ "Epoch: 1456 avg_cer= 0.019230770\n",
+ "Epoch: 1457 avg_cer= 0.019230770\n",
+ "Epoch: 1458 avg_cer= 0.019230770\n",
+ "Epoch: 1459 avg_cer= 0.019230770\n",
+ "Epoch: 1460 avg_cer= 0.019230770\n",
+ "Epoch: 1461 avg_cer= 0.019230770\n",
+ "Epoch: 1462 avg_cer= 0.019230770\n",
+ "Epoch: 1463 avg_cer= 0.019230770\n",
+ "Epoch: 1464 avg_cer= 0.019230770\n",
+ "Epoch: 1465 avg_cer= 0.019230770\n",
+ "Epoch: 1466 avg_cer= 0.019230770\n",
+ "Epoch: 1467 avg_cer= 0.019230770\n",
+ "Epoch: 1468 avg_cer= 0.019230770\n",
+ "Epoch: 1469 avg_cer= 0.019230770\n",
+ "Epoch: 1470 avg_cer= 0.019230770\n",
+ "Epoch: 1471 avg_cer= 0.019230770\n",
+ "Epoch: 1472 avg_cer= 0.019230770\n",
+ "Epoch: 1473 avg_cer= 0.019230770\n",
+ "Epoch: 1474 avg_cer= 0.019230770\n",
+ "Epoch: 1475 avg_cer= 0.019230770\n",
+ "Epoch: 1476 avg_cer= 0.019230770\n",
+ "Epoch: 1477 avg_cer= 0.019230770\n",
+ "Epoch: 1478 avg_cer= 0.019230770\n",
+ "Epoch: 1479 avg_cer= 0.019230770\n",
+ "Epoch: 1480 avg_cer= 0.019230770\n",
+ "Epoch: 1481 avg_cer= 0.019230770\n",
+ "Epoch: 1482 avg_cer= 0.019230770\n",
+ "Epoch: 1483 avg_cer= 0.019230770\n",
+ "Epoch: 1484 avg_cer= 0.019230770\n",
+ "Epoch: 1485 avg_cer= 0.019230770\n",
+ "Epoch: 1486 avg_cer= 0.019230770\n",
+ "Epoch: 1487 avg_cer= 0.019230770\n",
+ "Epoch: 1488 avg_cer= 0.019230770\n",
+ "Epoch: 1489 avg_cer= 0.019230770\n",
+ "Epoch: 1490 avg_cer= 0.019230770\n",
+ "Epoch: 1491 avg_cer= 0.019230770\n",
+ "Epoch: 1492 avg_cer= 0.019230770\n",
+ "Epoch: 1493 avg_cer= 0.019230770\n",
+ "Epoch: 1494 avg_cer= 0.019230770\n",
+ "Epoch: 1495 avg_cer= 0.019230770\n",
+ "Epoch: 1496 avg_cer= 0.019230770\n",
+ "Epoch: 1497 avg_cer= 0.019230770\n",
+ "Epoch: 1498 avg_cer= 0.019230770\n",
+ "Epoch: 1499 avg_cer= 0.019230770\n",
+ "Epoch: 1500 avg_cer= 0.019230770\n",
+ "Epoch: 1501 avg_cer= 0.019230770\n",
+ "Epoch: 1502 avg_cer= 0.019230770\n",
+ "Epoch: 1503 avg_cer= 0.019230770\n",
+ "Epoch: 1504 avg_cer= 0.019230770\n",
+ "Epoch: 1505 avg_cer= 0.019230770\n",
+ "Epoch: 1506 avg_cer= 0.019230770\n",
+ "Epoch: 1507 avg_cer= 0.019230770\n",
+ "Epoch: 1508 avg_cer= 0.019230770\n",
+ "Epoch: 1509 avg_cer= 0.019230770\n",
+ "Epoch: 1510 avg_cer= 0.019230770\n",
+ "Epoch: 1511 avg_cer= 0.019230770\n",
+ "Epoch: 1512 avg_cer= 0.019230770\n",
+ "Epoch: 1513 avg_cer= 0.019230770\n",
+ "Epoch: 1514 avg_cer= 0.019230770\n",
+ "Epoch: 1515 avg_cer= 0.019230770\n",
+ "Epoch: 1516 avg_cer= 0.019230770\n",
+ "Epoch: 1517 avg_cer= 0.019230770\n",
+ "Epoch: 1518 avg_cer= 0.019230770\n",
+ "Epoch: 1519 avg_cer= 0.019230770\n",
+ "Epoch: 1520 avg_cer= 0.019230770\n",
+ "Epoch: 1521 avg_cer= 0.019230770\n",
+ "Epoch: 1522 avg_cer= 0.019230770\n",
+ "Epoch: 1523 avg_cer= 0.019230770\n",
+ "Epoch: 1524 avg_cer= 0.019230770\n",
+ "Epoch: 1525 avg_cer= 0.019230770\n",
+ "Epoch: 1526 avg_cer= 0.019230770\n",
+ "Epoch: 1527 avg_cer= 0.019230770\n",
+ "Epoch: 1528 avg_cer= 0.019230770\n",
+ "Epoch: 1529 avg_cer= 0.019230770\n",
+ "Epoch: 1530 avg_cer= 0.019230770\n",
+ "Epoch: 1531 avg_cer= 0.019230770\n",
+ "Epoch: 1532 avg_cer= 0.019230770\n",
+ "Epoch: 1533 avg_cer= 0.019230770\n",
+ "Epoch: 1534 avg_cer= 0.019230770\n",
+ "Epoch: 1535 avg_cer= 0.019230770\n",
+ "Epoch: 1536 avg_cer= 0.019230770\n",
+ "Epoch: 1537 avg_cer= 0.019230770\n",
+ "Epoch: 1538 avg_cer= 0.019230770\n",
+ "Epoch: 1539 avg_cer= 0.019230770\n",
+ "Epoch: 1540 avg_cer= 0.019230770\n",
+ "Epoch: 1541 avg_cer= 0.019230770\n",
+ "Epoch: 1542 avg_cer= 0.019230770\n",
+ "Epoch: 1543 avg_cer= 0.019230770\n",
+ "Epoch: 1544 avg_cer= 0.019230770\n",
+ "Epoch: 1545 avg_cer= 0.019230770\n",
+ "Epoch: 1546 avg_cer= 0.019230770\n",
+ "Epoch: 1547 avg_cer= 0.019230770\n",
+ "Epoch: 1548 avg_cer= 0.019230770\n",
+ "Epoch: 1549 avg_cer= 0.019230770\n",
+ "Epoch: 1550 avg_cer= 0.019230770\n",
+ "Epoch: 1551 avg_cer= 0.019230770\n",
+ "Epoch: 1552 avg_cer= 0.019230770\n",
+ "Epoch: 1553 avg_cer= 0.019230770\n",
+ "Epoch: 1554 avg_cer= 0.019230770\n",
+ "Epoch: 1555 avg_cer= 0.019230770\n",
+ "Epoch: 1556 avg_cer= 0.019230770\n",
+ "Epoch: 1557 avg_cer= 0.019230770\n",
+ "Epoch: 1558 avg_cer= 0.019230770\n",
+ "Epoch: 1559 avg_cer= 0.019230770\n",
+ "Epoch: 1560 avg_cer= 0.019230770\n",
+ "Epoch: 1561 avg_cer= 0.019230770\n",
+ "Epoch: 1562 avg_cer= 0.019230770\n",
+ "Epoch: 1563 avg_cer= 0.019230770\n",
+ "Epoch: 1564 avg_cer= 0.019230770\n",
+ "Epoch: 1565 avg_cer= 0.019230770\n",
+ "Epoch: 1566 avg_cer= 0.019230770\n",
+ "Epoch: 1567 avg_cer= 0.019230770\n",
+ "Epoch: 1568 avg_cer= 0.019230770\n",
+ "Epoch: 1569 avg_cer= 0.019230770\n",
+ "Epoch: 1570 avg_cer= 0.019230770\n",
+ "Epoch: 1571 avg_cer= 0.019230770\n",
+ "Epoch: 1572 avg_cer= 0.019230770\n",
+ "Epoch: 1573 avg_cer= 0.019230770\n",
+ "Epoch: 1574 avg_cer= 0.019230770\n",
+ "Epoch: 1575 avg_cer= 0.019230770\n",
+ "Epoch: 1576 avg_cer= 0.019230770\n",
+ "Epoch: 1577 avg_cer= 0.019230770\n",
+ "Epoch: 1578 avg_cer= 0.019230770\n",
+ "Epoch: 1579 avg_cer= 0.019230770\n",
+ "Epoch: 1580 avg_cer= 0.019230770\n",
+ "Epoch: 1581 avg_cer= 0.019230770\n",
+ "Epoch: 1582 avg_cer= 0.019230770\n",
+ "Epoch: 1583 avg_cer= 0.019230770\n",
+ "Epoch: 1584 avg_cer= 0.019230770\n",
+ "Epoch: 1585 avg_cer= 0.019230770\n",
+ "Epoch: 1586 avg_cer= 0.019230770\n",
+ "Epoch: 1587 avg_cer= 0.019230770\n",
+ "Epoch: 1588 avg_cer= 0.019230770\n",
+ "Epoch: 1589 avg_cer= 0.019230770\n",
+ "Epoch: 1590 avg_cer= 0.019230770\n",
+ "Epoch: 1591 avg_cer= 0.019230770\n",
+ "Epoch: 1592 avg_cer= 0.019230770\n",
+ "Epoch: 1593 avg_cer= 0.019230770\n",
+ "Epoch: 1594 avg_cer= 0.019230770\n",
+ "Epoch: 1595 avg_cer= 0.019230770\n",
+ "Epoch: 1596 avg_cer= 0.019230770\n",
+ "Epoch: 1597 avg_cer= 0.019230770\n",
+ "Epoch: 1598 avg_cer= 0.019230770\n",
+ "Epoch: 1599 avg_cer= 0.019230770\n",
+ "Epoch: 1600 avg_cer= 0.019230770\n",
+ "Epoch: 1601 avg_cer= 0.019230770\n",
+ "Epoch: 1602 avg_cer= 0.019230770\n",
+ "Epoch: 1603 avg_cer= 0.019230770\n",
+ "Epoch: 1604 avg_cer= 0.019230770\n",
+ "Epoch: 1605 avg_cer= 0.019230770\n",
+ "Epoch: 1606 avg_cer= 0.019230770\n",
+ "Epoch: 1607 avg_cer= 0.019230770\n",
+ "Epoch: 1608 avg_cer= 0.019230770\n",
+ "Epoch: 1609 avg_cer= 0.019230770\n",
+ "Epoch: 1610 avg_cer= 0.019230770\n",
+ "Epoch: 1611 avg_cer= 0.019230770\n",
+ "Epoch: 1612 avg_cer= 0.019230770\n",
+ "Epoch: 1613 avg_cer= 0.019230770\n",
+ "Epoch: 1614 avg_cer= 0.019230770\n",
+ "Epoch: 1615 avg_cer= 0.019230770\n",
+ "Epoch: 1616 avg_cer= 0.019230770\n",
+ "Epoch: 1617 avg_cer= 0.019230770\n",
+ "Epoch: 1618 avg_cer= 0.019230770\n",
+ "Epoch: 1619 avg_cer= 0.019230770\n",
+ "Epoch: 1620 avg_cer= 0.019230770\n",
+ "Epoch: 1621 avg_cer= 0.019230770\n",
+ "Epoch: 1622 avg_cer= 0.019230770\n",
+ "Epoch: 1623 avg_cer= 0.019230770\n",
+ "Epoch: 1624 avg_cer= 0.019230770\n",
+ "Epoch: 1625 avg_cer= 0.019230770\n",
+ "Epoch: 1626 avg_cer= 0.019230770\n",
+ "Epoch: 1627 avg_cer= 0.019230770\n",
+ "Epoch: 1628 avg_cer= 0.019230770\n",
+ "Epoch: 1629 avg_cer= 0.019230770\n",
+ "Epoch: 1630 avg_cer= 0.019230770\n",
+ "Epoch: 1631 avg_cer= 0.019230770\n",
+ "Epoch: 1632 avg_cer= 0.019230770\n",
+ "Epoch: 1633 avg_cer= 0.019230770\n",
+ "Epoch: 1634 avg_cer= 0.019230770\n",
+ "Epoch: 1635 avg_cer= 0.019230770\n",
+ "Epoch: 1636 avg_cer= 0.019230770\n",
+ "Epoch: 1637 avg_cer= 0.019230770\n",
+ "Epoch: 1638 avg_cer= 0.019230770\n",
+ "Epoch: 1639 avg_cer= 0.019230770\n",
+ "Epoch: 1640 avg_cer= 0.019230770\n",
+ "Epoch: 1641 avg_cer= 0.019230770\n",
+ "Epoch: 1642 avg_cer= 0.019230770\n",
+ "Epoch: 1643 avg_cer= 0.019230770\n",
+ "Epoch: 1644 avg_cer= 0.019230770\n",
+ "Epoch: 1645 avg_cer= 0.019230770\n",
+ "Epoch: 1646 avg_cer= 0.019230770\n",
+ "Epoch: 1647 avg_cer= 0.019230770\n",
+ "Epoch: 1648 avg_cer= 0.019230770\n",
+ "Epoch: 1649 avg_cer= 0.019230770\n",
+ "Epoch: 1650 avg_cer= 0.019230770\n",
+ "Epoch: 1651 avg_cer= 0.019230770\n",
+ "Epoch: 1652 avg_cer= 0.019230770\n",
+ "Epoch: 1653 avg_cer= 0.019230770\n",
+ "Epoch: 1654 avg_cer= 0.019230770\n",
+ "Epoch: 1655 avg_cer= 0.019230770\n",
+ "Epoch: 1656 avg_cer= 0.019230770\n",
+ "Epoch: 1657 avg_cer= 0.019230770\n",
+ "Epoch: 1658 avg_cer= 0.019230770\n",
+ "Epoch: 1659 avg_cer= 0.019230770\n",
+ "Epoch: 1660 avg_cer= 0.019230770\n",
+ "Epoch: 1661 avg_cer= 0.019230770\n",
+ "Epoch: 1662 avg_cer= 0.019230770\n",
+ "Epoch: 1663 avg_cer= 0.019230770\n",
+ "Epoch: 1664 avg_cer= 0.019230770\n",
+ "Epoch: 1665 avg_cer= 0.019230770\n",
+ "Epoch: 1666 avg_cer= 0.019230770\n",
+ "Epoch: 1667 avg_cer= 0.019230770\n",
+ "Epoch: 1668 avg_cer= 0.019230770\n",
+ "Epoch: 1669 avg_cer= 0.019230770\n",
+ "Epoch: 1670 avg_cer= 0.019230770\n",
+ "Epoch: 1671 avg_cer= 0.019230770\n",
+ "Epoch: 1672 avg_cer= 0.019230770\n",
+ "Epoch: 1673 avg_cer= 0.019230770\n",
+ "Epoch: 1674 avg_cer= 0.019230770\n",
+ "Epoch: 1675 avg_cer= 0.019230770\n",
+ "Epoch: 1676 avg_cer= 0.019230770\n",
+ "Epoch: 1677 avg_cer= 0.019230770\n",
+ "Epoch: 1678 avg_cer= 0.019230770\n",
+ "Epoch: 1679 avg_cer= 0.019230770\n",
+ "Epoch: 1680 avg_cer= 0.019230770\n",
+ "Epoch: 1681 avg_cer= 0.019230770\n",
+ "Epoch: 1682 avg_cer= 0.019230770\n",
+ "Epoch: 1683 avg_cer= 0.019230770\n",
+ "Epoch: 1684 avg_cer= 0.019230770\n",
+ "Epoch: 1685 avg_cer= 0.019230770\n",
+ "Epoch: 1686 avg_cer= 0.019230770\n",
+ "Epoch: 1687 avg_cer= 0.019230770\n",
+ "Epoch: 1688 avg_cer= 0.019230770\n",
+ "Epoch: 1689 avg_cer= 0.019230770\n",
+ "Epoch: 1690 avg_cer= 0.019230770\n",
+ "Epoch: 1691 avg_cer= 0.019230770\n",
+ "Epoch: 1692 avg_cer= 0.019230770\n",
+ "Epoch: 1693 avg_cer= 0.019230770\n",
+ "Epoch: 1694 avg_cer= 0.019230770\n",
+ "Epoch: 1695 avg_cer= 0.019230770\n",
+ "Epoch: 1696 avg_cer= 0.019230770\n",
+ "Epoch: 1697 avg_cer= 0.019230770\n",
+ "Epoch: 1698 avg_cer= 0.019230770\n",
+ "Epoch: 1699 avg_cer= 0.019230770\n",
+ "Epoch: 1700 avg_cer= 0.019230770\n",
+ "Epoch: 1701 avg_cer= 0.019230770\n",
+ "Epoch: 1702 avg_cer= 0.019230770\n",
+ "Epoch: 1703 avg_cer= 0.019230770\n",
+ "Epoch: 1704 avg_cer= 0.019230770\n",
+ "Epoch: 1705 avg_cer= 0.019230770\n",
+ "Epoch: 1706 avg_cer= 0.019230770\n",
+ "Epoch: 1707 avg_cer= 0.019230770\n",
+ "Epoch: 1708 avg_cer= 0.019230770\n",
+ "Epoch: 1709 avg_cer= 0.019230770\n",
+ "Epoch: 1710 avg_cer= 0.019230770\n",
+ "Epoch: 1711 avg_cer= 0.019230770\n",
+ "Epoch: 1712 avg_cer= 0.019230770\n",
+ "Epoch: 1713 avg_cer= 0.019230770\n",
+ "Epoch: 1714 avg_cer= 0.019230770\n",
+ "Epoch: 1715 avg_cer= 0.019230770\n",
+ "Epoch: 1716 avg_cer= 0.019230770\n",
+ "Epoch: 1717 avg_cer= 0.019230770\n",
+ "Epoch: 1718 avg_cer= 0.019230770\n",
+ "Epoch: 1719 avg_cer= 0.019230770\n",
+ "Epoch: 1720 avg_cer= 0.019230770\n",
+ "Epoch: 1721 avg_cer= 0.019230770\n",
+ "Epoch: 1722 avg_cer= 0.019230770\n",
+ "Epoch: 1723 avg_cer= 0.019230770\n",
+ "Epoch: 1724 avg_cer= 0.019230770\n",
+ "Epoch: 1725 avg_cer= 0.019230770\n",
+ "Epoch: 1726 avg_cer= 0.019230770\n",
+ "Epoch: 1727 avg_cer= 0.019230770\n",
+ "Epoch: 1728 avg_cer= 0.019230770\n",
+ "Epoch: 1729 avg_cer= 0.019230770\n",
+ "Epoch: 1730 avg_cer= 0.019230770\n",
+ "Epoch: 1731 avg_cer= 0.019230770\n",
+ "Epoch: 1732 avg_cer= 0.019230770\n",
+ "Epoch: 1733 avg_cer= 0.019230770\n",
+ "Epoch: 1734 avg_cer= 0.019230770\n",
+ "Epoch: 1735 avg_cer= 0.019230770\n",
+ "Epoch: 1736 avg_cer= 0.019230770\n",
+ "Epoch: 1737 avg_cer= 0.019230770\n",
+ "Epoch: 1738 avg_cer= 0.019230770\n",
+ "Epoch: 1739 avg_cer= 0.019230770\n",
+ "Epoch: 1740 avg_cer= 0.019230770\n",
+ "Epoch: 1741 avg_cer= 0.019230770\n",
+ "Epoch: 1742 avg_cer= 0.019230770\n",
+ "Epoch: 1743 avg_cer= 0.019230770\n",
+ "Epoch: 1744 avg_cer= 0.019230770\n",
+ "Epoch: 1745 avg_cer= 0.019230770\n",
+ "Epoch: 1746 avg_cer= 0.019230770\n",
+ "Epoch: 1747 avg_cer= 0.019230770\n",
+ "Epoch: 1748 avg_cer= 0.019230770\n",
+ "Epoch: 1749 avg_cer= 0.019230770\n",
+ "Epoch: 1750 avg_cer= 0.019230770\n",
+ "Epoch: 1751 avg_cer= 0.019230770\n",
+ "Epoch: 1752 avg_cer= 0.019230770\n",
+ "Epoch: 1753 avg_cer= 0.019230770\n",
+ "Epoch: 1754 avg_cer= 0.019230770\n",
+ "Epoch: 1755 avg_cer= 0.019230770\n",
+ "Epoch: 1756 avg_cer= 0.019230770\n",
+ "Epoch: 1757 avg_cer= 0.019230770\n",
+ "Epoch: 1758 avg_cer= 0.019230770\n",
+ "Epoch: 1759 avg_cer= 0.019230770\n",
+ "Epoch: 1760 avg_cer= 0.019230770\n",
+ "Epoch: 1761 avg_cer= 0.019230770\n",
+ "Epoch: 1762 avg_cer= 0.019230770\n",
+ "Epoch: 1763 avg_cer= 0.019230770\n",
+ "Epoch: 1764 avg_cer= 0.019230770\n",
+ "Epoch: 1765 avg_cer= 0.019230770\n",
+ "Epoch: 1766 avg_cer= 0.019230770\n",
+ "Epoch: 1767 avg_cer= 0.019230770\n",
+ "Epoch: 1768 avg_cer= 0.019230770\n",
+ "Epoch: 1769 avg_cer= 0.019230770\n",
+ "Epoch: 1770 avg_cer= 0.019230770\n",
+ "Epoch: 1771 avg_cer= 0.019230770\n",
+ "Epoch: 1772 avg_cer= 0.019230770\n",
+ "Epoch: 1773 avg_cer= 0.019230770\n",
+ "Epoch: 1774 avg_cer= 0.019230770\n",
+ "Epoch: 1775 avg_cer= 0.019230770\n",
+ "Epoch: 1776 avg_cer= 0.019230770\n",
+ "Epoch: 1777 avg_cer= 0.019230770\n",
+ "Epoch: 1778 avg_cer= 0.019230770\n",
+ "Epoch: 1779 avg_cer= 0.019230770\n",
+ "Epoch: 1780 avg_cer= 0.019230770\n",
+ "Epoch: 1781 avg_cer= 0.019230770\n",
+ "Epoch: 1782 avg_cer= 0.019230770\n",
+ "Epoch: 1783 avg_cer= 0.019230770\n",
+ "Epoch: 1784 avg_cer= 0.019230770\n",
+ "Epoch: 1785 avg_cer= 0.019230770\n",
+ "Epoch: 1786 avg_cer= 0.019230770\n",
+ "Epoch: 1787 avg_cer= 0.019230770\n",
+ "Epoch: 1788 avg_cer= 0.019230770\n",
+ "Epoch: 1789 avg_cer= 0.019230770\n",
+ "Epoch: 1790 avg_cer= 0.019230770\n",
+ "Epoch: 1791 avg_cer= 0.019230770\n",
+ "Epoch: 1792 avg_cer= 0.019230770\n",
+ "Epoch: 1793 avg_cer= 0.019230770\n",
+ "Epoch: 1794 avg_cer= 0.019230770\n",
+ "Epoch: 1795 avg_cer= 0.019230770\n",
+ "Epoch: 1796 avg_cer= 0.019230770\n",
+ "Epoch: 1797 avg_cer= 0.019230770\n",
+ "Epoch: 1798 avg_cer= 0.019230770\n",
+ "Epoch: 1799 avg_cer= 0.019230770\n",
+ "Epoch: 1800 avg_cer= 0.019230770\n",
+ "Epoch: 1801 avg_cer= 0.019230770\n",
+ "Epoch: 1802 avg_cer= 0.019230770\n",
+ "Epoch: 1803 avg_cer= 0.019230770\n",
+ "Epoch: 1804 avg_cer= 0.019230770\n",
+ "Epoch: 1805 avg_cer= 0.019230770\n",
+ "Epoch: 1806 avg_cer= 0.019230770\n",
+ "Epoch: 1807 avg_cer= 0.019230770\n",
+ "Epoch: 1808 avg_cer= 0.019230770\n",
+ "Epoch: 1809 avg_cer= 0.019230770\n",
+ "Epoch: 1810 avg_cer= 0.019230770\n",
+ "Epoch: 1811 avg_cer= 0.019230770\n",
+ "Epoch: 1812 avg_cer= 0.019230770\n",
+ "Epoch: 1813 avg_cer= 0.019230770\n",
+ "Epoch: 1814 avg_cer= 0.019230770\n",
+ "Epoch: 1815 avg_cer= 0.019230770\n",
+ "Epoch: 1816 avg_cer= 0.019230770\n",
+ "Epoch: 1817 avg_cer= 0.019230770\n",
+ "Epoch: 1818 avg_cer= 0.019230770\n",
+ "Epoch: 1819 avg_cer= 0.019230770\n",
+ "Epoch: 1820 avg_cer= 0.019230770\n",
+ "Epoch: 1821 avg_cer= 0.019230770\n",
+ "Epoch: 1822 avg_cer= 0.019230770\n",
+ "Epoch: 1823 avg_cer= 0.019230770\n",
+ "Epoch: 1824 avg_cer= 0.019230770\n",
+ "Epoch: 1825 avg_cer= 0.019230770\n",
+ "Epoch: 1826 avg_cer= 0.019230770\n",
+ "Epoch: 1827 avg_cer= 0.019230770\n",
+ "Epoch: 1828 avg_cer= 0.019230770\n",
+ "Epoch: 1829 avg_cer= 0.019230770\n",
+ "Epoch: 1830 avg_cer= 0.019230770\n",
+ "Epoch: 1831 avg_cer= 0.019230770\n",
+ "Epoch: 1832 avg_cer= 0.019230770\n",
+ "Epoch: 1833 avg_cer= 0.019230770\n",
+ "Epoch: 1834 avg_cer= 0.019230770\n",
+ "Epoch: 1835 avg_cer= 0.019230770\n",
+ "Epoch: 1836 avg_cer= 0.019230770\n",
+ "Epoch: 1837 avg_cer= 0.019230770\n",
+ "Epoch: 1838 avg_cer= 0.019230770\n",
+ "Epoch: 1839 avg_cer= 0.019230770\n",
+ "Epoch: 1840 avg_cer= 0.019230770\n",
+ "Epoch: 1841 avg_cer= 0.019230770\n",
+ "Epoch: 1842 avg_cer= 0.019230770\n",
+ "Epoch: 1843 avg_cer= 0.019230770\n",
+ "Epoch: 1844 avg_cer= 0.019230770\n",
+ "Epoch: 1845 avg_cer= 0.019230770\n",
+ "Epoch: 1846 avg_cer= 0.019230770\n",
+ "Epoch: 1847 avg_cer= 0.019230770\n",
+ "Epoch: 1848 avg_cer= 0.019230770\n",
+ "Epoch: 1849 avg_cer= 0.019230770\n",
+ "Epoch: 1850 avg_cer= 0.019230770\n",
+ "Epoch: 1851 avg_cer= 0.019230770\n",
+ "Epoch: 1852 avg_cer= 0.019230770\n",
+ "Epoch: 1853 avg_cer= 0.019230770\n",
+ "Epoch: 1854 avg_cer= 0.019230770\n",
+ "Epoch: 1855 avg_cer= 0.019230770\n",
+ "Epoch: 1856 avg_cer= 0.019230770\n",
+ "Epoch: 1857 avg_cer= 0.019230770\n",
+ "Epoch: 1858 avg_cer= 0.019230770\n",
+ "Epoch: 1859 avg_cer= 0.019230770\n",
+ "Epoch: 1860 avg_cer= 0.019230770\n",
+ "Epoch: 1861 avg_cer= 0.019230770\n",
+ "Epoch: 1862 avg_cer= 0.019230770\n",
+ "Epoch: 1863 avg_cer= 0.019230770\n",
+ "Epoch: 1864 avg_cer= 0.019230770\n",
+ "Epoch: 1865 avg_cer= 0.019230770\n",
+ "Epoch: 1866 avg_cer= 0.019230770\n",
+ "Epoch: 1867 avg_cer= 0.019230770\n",
+ "Epoch: 1868 avg_cer= 0.019230770\n",
+ "Epoch: 1869 avg_cer= 0.019230770\n",
+ "Epoch: 1870 avg_cer= 0.019230770\n",
+ "Epoch: 1871 avg_cer= 0.019230770\n",
+ "Epoch: 1872 avg_cer= 0.019230770\n",
+ "Epoch: 1873 avg_cer= 0.019230770\n",
+ "Epoch: 1874 avg_cer= 0.019230770\n",
+ "Epoch: 1875 avg_cer= 0.019230770\n",
+ "Epoch: 1876 avg_cer= 0.019230770\n",
+ "Epoch: 1877 avg_cer= 0.019230770\n",
+ "Epoch: 1878 avg_cer= 0.019230770\n",
+ "Epoch: 1879 avg_cer= 0.019230770\n",
+ "Epoch: 1880 avg_cer= 0.019230770\n",
+ "Epoch: 1881 avg_cer= 0.019230770\n",
+ "Epoch: 1882 avg_cer= 0.019230770\n",
+ "Epoch: 1883 avg_cer= 0.019230770\n",
+ "Epoch: 1884 avg_cer= 0.019230770\n",
+ "Epoch: 1885 avg_cer= 0.019230770\n",
+ "Epoch: 1886 avg_cer= 0.019230770\n",
+ "Epoch: 1887 avg_cer= 0.019230770\n",
+ "Epoch: 1888 avg_cer= 0.019230770\n",
+ "Epoch: 1889 avg_cer= 0.019230770\n",
+ "Epoch: 1890 avg_cer= 0.019230770\n",
+ "Epoch: 1891 avg_cer= 0.019230770\n",
+ "Epoch: 1892 avg_cer= 0.019230770\n",
+ "Epoch: 1893 avg_cer= 0.019230770\n",
+ "Epoch: 1894 avg_cer= 0.019230770\n",
+ "Epoch: 1895 avg_cer= 0.019230770\n",
+ "Epoch: 1896 avg_cer= 0.019230770\n",
+ "Epoch: 1897 avg_cer= 0.019230770\n",
+ "Epoch: 1898 avg_cer= 0.019230770\n",
+ "Epoch: 1899 avg_cer= 0.019230770\n",
+ "Epoch: 1900 avg_cer= 0.019230770\n",
+ "Epoch: 1901 avg_cer= 0.019230770\n",
+ "Epoch: 1902 avg_cer= 0.019230770\n",
+ "Epoch: 1903 avg_cer= 0.019230770\n",
+ "Epoch: 1904 avg_cer= 0.019230770\n",
+ "Epoch: 1905 avg_cer= 0.019230770\n",
+ "Epoch: 1906 avg_cer= 0.019230770\n",
+ "Epoch: 1907 avg_cer= 0.019230770\n",
+ "Epoch: 1908 avg_cer= 0.019230770\n",
+ "Epoch: 1909 avg_cer= 0.019230770\n",
+ "Epoch: 1910 avg_cer= 0.019230770\n",
+ "Epoch: 1911 avg_cer= 0.019230770\n",
+ "Epoch: 1912 avg_cer= 0.019230770\n",
+ "Epoch: 1913 avg_cer= 0.019230770\n",
+ "Epoch: 1914 avg_cer= 0.019230770\n",
+ "Epoch: 1915 avg_cer= 0.019230770\n",
+ "Epoch: 1916 avg_cer= 0.019230770\n",
+ "Epoch: 1917 avg_cer= 0.019230770\n",
+ "Epoch: 1918 avg_cer= 0.019230770\n",
+ "Epoch: 1919 avg_cer= 0.019230770\n",
+ "Epoch: 1920 avg_cer= 0.019230770\n",
+ "Epoch: 1921 avg_cer= 0.019230770\n",
+ "Epoch: 1922 avg_cer= 0.019230770\n",
+ "Epoch: 1923 avg_cer= 0.019230770\n",
+ "Epoch: 1924 avg_cer= 0.019230770\n",
+ "Epoch: 1925 avg_cer= 0.019230770\n",
+ "Epoch: 1926 avg_cer= 0.019230770\n",
+ "Epoch: 1927 avg_cer= 0.019230770\n",
+ "Epoch: 1928 avg_cer= 0.019230770\n",
+ "Epoch: 1929 avg_cer= 0.019230770\n",
+ "Epoch: 1930 avg_cer= 0.019230770\n",
+ "Epoch: 1931 avg_cer= 0.019230770\n",
+ "Epoch: 1932 avg_cer= 0.019230770\n",
+ "Epoch: 1933 avg_cer= 0.019230770\n",
+ "Epoch: 1934 avg_cer= 0.019230770\n",
+ "Epoch: 1935 avg_cer= 0.019230770\n",
+ "Epoch: 1936 avg_cer= 0.019230770\n",
+ "Epoch: 1937 avg_cer= 0.019230770\n",
+ "Epoch: 1938 avg_cer= 0.019230770\n",
+ "Epoch: 1939 avg_cer= 0.019230770\n",
+ "Epoch: 1940 avg_cer= 0.019230770\n",
+ "Epoch: 1941 avg_cer= 0.019230770\n",
+ "Epoch: 1942 avg_cer= 0.019230770\n",
+ "Epoch: 1943 avg_cer= 0.019230770\n",
+ "Epoch: 1944 avg_cer= 0.019230770\n",
+ "Epoch: 1945 avg_cer= 0.019230770\n",
+ "Epoch: 1946 avg_cer= 0.019230770\n",
+ "Epoch: 1947 avg_cer= 0.019230770\n",
+ "Epoch: 1948 avg_cer= 0.019230770\n",
+ "Epoch: 1949 avg_cer= 0.019230770\n",
+ "Epoch: 1950 avg_cer= 0.019230770\n",
+ "Epoch: 1951 avg_cer= 0.019230770\n",
+ "Epoch: 1952 avg_cer= 0.019230770\n",
+ "Epoch: 1953 avg_cer= 0.019230770\n",
+ "Epoch: 1954 avg_cer= 0.019230770\n",
+ "Epoch: 1955 avg_cer= 0.019230770\n",
+ "Epoch: 1956 avg_cer= 0.019230770\n",
+ "Epoch: 1957 avg_cer= 0.019230770\n",
+ "Epoch: 1958 avg_cer= 0.019230770\n",
+ "Epoch: 1959 avg_cer= 0.019230770\n",
+ "Epoch: 1960 avg_cer= 0.019230770\n",
+ "Epoch: 1961 avg_cer= 0.019230770\n",
+ "Epoch: 1962 avg_cer= 0.019230770\n",
+ "Epoch: 1963 avg_cer= 0.019230770\n",
+ "Epoch: 1964 avg_cer= 0.019230770\n",
+ "Epoch: 1965 avg_cer= 0.019230770\n",
+ "Epoch: 1966 avg_cer= 0.019230770\n",
+ "Epoch: 1967 avg_cer= 0.019230770\n",
+ "Epoch: 1968 avg_cer= 0.019230770\n",
+ "Epoch: 1969 avg_cer= 0.019230770\n",
+ "Epoch: 1970 avg_cer= 0.019230770\n",
+ "Epoch: 1971 avg_cer= 0.019230770\n",
+ "Epoch: 1972 avg_cer= 0.019230770\n",
+ "Epoch: 1973 avg_cer= 0.019230770\n",
+ "Epoch: 1974 avg_cer= 0.019230770\n",
+ "Epoch: 1975 avg_cer= 0.019230770\n",
+ "Epoch: 1976 avg_cer= 0.019230770\n",
+ "Epoch: 1977 avg_cer= 0.019230770\n",
+ "Epoch: 1978 avg_cer= 0.019230770\n",
+ "Epoch: 1979 avg_cer= 0.019230770\n",
+ "Epoch: 1980 avg_cer= 0.019230770\n",
+ "Epoch: 1981 avg_cer= 0.019230770\n",
+ "Epoch: 1982 avg_cer= 0.019230770\n",
+ "Epoch: 1983 avg_cer= 0.019230770\n",
+ "Epoch: 1984 avg_cer= 0.019230770\n",
+ "Epoch: 1985 avg_cer= 0.019230770\n",
+ "Epoch: 1986 avg_cer= 0.019230770\n",
+ "Epoch: 1987 avg_cer= 0.019230770\n",
+ "Epoch: 1988 avg_cer= 0.019230770\n",
+ "Epoch: 1989 avg_cer= 0.019230770\n",
+ "Epoch: 1990 avg_cer= 0.019230770\n",
+ "Epoch: 1991 avg_cer= 0.019230770\n",
+ "Epoch: 1992 avg_cer= 0.019230770\n",
+ "Epoch: 1993 avg_cer= 0.019230770\n",
+ "Epoch: 1994 avg_cer= 0.019230770\n",
+ "Epoch: 1995 avg_cer= 0.019230770\n",
+ "Epoch: 1996 avg_cer= 0.019230770\n",
+ "Epoch: 1997 avg_cer= 0.019230770\n",
+ "Epoch: 1998 avg_cer= 0.019230770\n",
+ "Epoch: 1999 avg_cer= 0.019230770\n",
+ "Epoch: 2000 avg_cer= 0.019230770\n",
+ "Epoch: 2001 avg_cer= 0.019230770\n",
+ "Epoch: 2002 avg_cer= 0.019230770\n",
+ "Epoch: 2003 avg_cer= 0.019230770\n",
+ "Epoch: 2004 avg_cer= 0.019230770\n",
+ "Epoch: 2005 avg_cer= 0.019230770\n",
+ "Epoch: 2006 avg_cer= 0.019230770\n",
+ "Epoch: 2007 avg_cer= 0.019230770\n",
+ "Epoch: 2008 avg_cer= 0.019230770\n",
+ "Epoch: 2009 avg_cer= 0.019230770\n",
+ "Epoch: 2010 avg_cer= 0.019230770\n",
+ "Epoch: 2011 avg_cer= 0.019230770\n",
+ "Epoch: 2012 avg_cer= 0.019230770\n",
+ "Epoch: 2013 avg_cer= 0.019230770\n",
+ "Epoch: 2014 avg_cer= 0.019230770\n",
+ "Epoch: 2015 avg_cer= 0.019230770\n",
+ "Epoch: 2016 avg_cer= 0.019230770\n",
+ "Epoch: 2017 avg_cer= 0.019230770\n",
+ "Epoch: 2018 avg_cer= 0.019230770\n",
+ "Epoch: 2019 avg_cer= 0.019230770\n",
+ "Epoch: 2020 avg_cer= 0.019230770\n",
+ "Epoch: 2021 avg_cer= 0.019230770\n",
+ "Epoch: 2022 avg_cer= 0.019230770\n",
+ "Epoch: 2023 avg_cer= 0.019230770\n",
+ "Epoch: 2024 avg_cer= 0.019230770\n",
+ "Epoch: 2025 avg_cer= 0.019230770\n",
+ "Epoch: 2026 avg_cer= 0.019230770\n",
+ "Epoch: 2027 avg_cer= 0.019230770\n",
+ "Epoch: 2028 avg_cer= 0.019230770\n",
+ "Epoch: 2029 avg_cer= 0.019230770\n",
+ "Epoch: 2030 avg_cer= 0.019230770\n",
+ "Epoch: 2031 avg_cer= 0.019230770\n",
+ "Epoch: 2032 avg_cer= 0.019230770\n",
+ "Epoch: 2033 avg_cer= 0.019230770\n",
+ "Epoch: 2034 avg_cer= 0.019230770\n",
+ "Epoch: 2035 avg_cer= 0.019230770\n",
+ "Epoch: 2036 avg_cer= 0.019230770\n",
+ "Epoch: 2037 avg_cer= 0.019230770\n",
+ "Epoch: 2038 avg_cer= 0.019230770\n",
+ "Epoch: 2039 avg_cer= 0.019230770\n",
+ "Epoch: 2040 avg_cer= 0.019230770\n",
+ "Epoch: 2041 avg_cer= 0.019230770\n",
+ "Epoch: 2042 avg_cer= 0.019230770\n",
+ "Epoch: 2043 avg_cer= 0.019230770\n",
+ "Epoch: 2044 avg_cer= 0.019230770\n",
+ "Epoch: 2045 avg_cer= 0.019230770\n",
+ "Epoch: 2046 avg_cer= 0.019230770\n",
+ "Epoch: 2047 avg_cer= 0.019230770\n",
+ "Epoch: 2048 avg_cer= 0.019230770\n",
+ "Epoch: 2049 avg_cer= 0.019230770\n",
+ "Epoch: 2050 avg_cer= 0.019230770\n",
+ "Epoch: 2051 avg_cer= 0.019230770\n",
+ "Epoch: 2052 avg_cer= 0.019230770\n",
+ "Epoch: 2053 avg_cer= 0.019230770\n",
+ "Epoch: 2054 avg_cer= 0.019230770\n",
+ "Epoch: 2055 avg_cer= 0.019230770\n",
+ "Epoch: 2056 avg_cer= 0.019230770\n",
+ "Epoch: 2057 avg_cer= 0.019230770\n",
+ "Epoch: 2058 avg_cer= 0.019230770\n",
+ "Epoch: 2059 avg_cer= 0.019230770\n",
+ "Epoch: 2060 avg_cer= 0.019230770\n",
+ "Epoch: 2061 avg_cer= 0.019230770\n",
+ "Epoch: 2062 avg_cer= 0.019230770\n",
+ "Epoch: 2063 avg_cer= 0.019230770\n",
+ "Epoch: 2064 avg_cer= 0.019230770\n",
+ "Epoch: 2065 avg_cer= 0.019230770\n",
+ "Epoch: 2066 avg_cer= 0.019230770\n",
+ "Epoch: 2067 avg_cer= 0.019230770\n",
+ "Epoch: 2068 avg_cer= 0.019230770\n",
+ "Epoch: 2069 avg_cer= 0.019230770\n",
+ "Epoch: 2070 avg_cer= 0.019230770\n",
+ "Epoch: 2071 avg_cer= 0.019230770\n",
+ "Epoch: 2072 avg_cer= 0.019230770\n",
+ "Epoch: 2073 avg_cer= 0.019230770\n",
+ "Epoch: 2074 avg_cer= 0.019230770\n",
+ "Epoch: 2075 avg_cer= 0.019230770\n",
+ "Epoch: 2076 avg_cer= 0.019230770\n",
+ "Epoch: 2077 avg_cer= 0.019230770\n",
+ "Epoch: 2078 avg_cer= 0.019230770\n",
+ "Epoch: 2079 avg_cer= 0.019230770\n",
+ "Epoch: 2080 avg_cer= 0.019230770\n",
+ "Epoch: 2081 avg_cer= 0.019230770\n",
+ "Epoch: 2082 avg_cer= 0.019230770\n",
+ "Epoch: 2083 avg_cer= 0.019230770\n",
+ "Epoch: 2084 avg_cer= 0.019230770\n",
+ "Epoch: 2085 avg_cer= 0.019230770\n",
+ "Epoch: 2086 avg_cer= 0.019230770\n",
+ "Epoch: 2087 avg_cer= 0.019230770\n",
+ "Epoch: 2088 avg_cer= 0.019230770\n",
+ "Epoch: 2089 avg_cer= 0.019230770\n",
+ "Epoch: 2090 avg_cer= 0.019230770\n",
+ "Epoch: 2091 avg_cer= 0.019230770\n",
+ "Epoch: 2092 avg_cer= 0.019230770\n",
+ "Epoch: 2093 avg_cer= 0.019230770\n",
+ "Epoch: 2094 avg_cer= 0.019230770\n",
+ "Epoch: 2095 avg_cer= 0.019230770\n",
+ "Epoch: 2096 avg_cer= 0.019230770\n",
+ "Epoch: 2097 avg_cer= 0.019230770\n",
+ "Epoch: 2098 avg_cer= 0.019230770\n",
+ "Epoch: 2099 avg_cer= 0.019230770\n",
+ "Epoch: 2100 avg_cer= 0.019230770\n",
+ "Epoch: 2101 avg_cer= 0.019230770\n",
+ "Epoch: 2102 avg_cer= 0.019230770\n",
+ "Epoch: 2103 avg_cer= 0.019230770\n",
+ "Epoch: 2104 avg_cer= 0.019230770\n",
+ "Epoch: 2105 avg_cer= 0.019230770\n",
+ "Epoch: 2106 avg_cer= 0.019230770\n",
+ "Epoch: 2107 avg_cer= 0.019230770\n",
+ "Epoch: 2108 avg_cer= 0.019230770\n",
+ "Epoch: 2109 avg_cer= 0.019230770\n",
+ "Epoch: 2110 avg_cer= 0.019230770\n",
+ "Epoch: 2111 avg_cer= 0.019230770\n",
+ "Epoch: 2112 avg_cer= 0.019230770\n",
+ "Epoch: 2113 avg_cer= 0.019230770\n",
+ "Epoch: 2114 avg_cer= 0.019230770\n",
+ "Epoch: 2115 avg_cer= 0.019230770\n",
+ "Epoch: 2116 avg_cer= 0.019230770\n",
+ "Epoch: 2117 avg_cer= 0.019230770\n",
+ "Epoch: 2118 avg_cer= 0.019230770\n",
+ "Epoch: 2119 avg_cer= 0.019230770\n",
+ "Epoch: 2120 avg_cer= 0.019230770\n",
+ "Epoch: 2121 avg_cer= 0.019230770\n",
+ "Epoch: 2122 avg_cer= 0.019230770\n",
+ "Epoch: 2123 avg_cer= 0.019230770\n",
+ "Epoch: 2124 avg_cer= 0.019230770\n",
+ "Epoch: 2125 avg_cer= 0.019230770\n",
+ "Epoch: 2126 avg_cer= 0.019230770\n",
+ "Epoch: 2127 avg_cer= 0.019230770\n",
+ "Epoch: 2128 avg_cer= 0.019230770\n",
+ "Epoch: 2129 avg_cer= 0.019230770\n",
+ "Epoch: 2130 avg_cer= 0.019230770\n",
+ "Epoch: 2131 avg_cer= 0.019230770\n",
+ "Epoch: 2132 avg_cer= 0.019230770\n",
+ "Epoch: 2133 avg_cer= 0.019230770\n",
+ "Epoch: 2134 avg_cer= 0.019230770\n",
+ "Epoch: 2135 avg_cer= 0.019230770\n",
+ "Epoch: 2136 avg_cer= 0.019230770\n",
+ "Epoch: 2137 avg_cer= 0.019230770\n",
+ "Epoch: 2138 avg_cer= 0.019230770\n",
+ "Epoch: 2139 avg_cer= 0.019230770\n",
+ "Epoch: 2140 avg_cer= 0.019230770\n",
+ "Epoch: 2141 avg_cer= 0.019230770\n",
+ "Epoch: 2142 avg_cer= 0.019230770\n",
+ "Epoch: 2143 avg_cer= 0.019230770\n",
+ "Epoch: 2144 avg_cer= 0.019230770\n",
+ "Epoch: 2145 avg_cer= 0.019230770\n",
+ "Epoch: 2146 avg_cer= 0.019230770\n",
+ "Epoch: 2147 avg_cer= 0.019230770\n",
+ "Epoch: 2148 avg_cer= 0.019230770\n",
+ "Epoch: 2149 avg_cer= 0.019230770\n",
+ "Epoch: 2150 avg_cer= 0.019230770\n",
+ "Epoch: 2151 avg_cer= 0.019230770\n",
+ "Epoch: 2152 avg_cer= 0.019230770\n",
+ "Epoch: 2153 avg_cer= 0.019230770\n",
+ "Epoch: 2154 avg_cer= 0.019230770\n",
+ "Epoch: 2155 avg_cer= 0.019230770\n",
+ "Epoch: 2156 avg_cer= 0.019230770\n",
+ "Epoch: 2157 avg_cer= 0.019230770\n",
+ "Epoch: 2158 avg_cer= 0.019230770\n",
+ "Epoch: 2159 avg_cer= 0.019230770\n",
+ "Epoch: 2160 avg_cer= 0.019230770\n",
+ "Epoch: 2161 avg_cer= 0.019230770\n",
+ "Epoch: 2162 avg_cer= 0.019230770\n",
+ "Epoch: 2163 avg_cer= 0.019230770\n",
+ "Epoch: 2164 avg_cer= 0.019230770\n",
+ "Epoch: 2165 avg_cer= 0.019230770\n",
+ "Epoch: 2166 avg_cer= 0.019230770\n",
+ "Epoch: 2167 avg_cer= 0.019230770\n",
+ "Epoch: 2168 avg_cer= 0.019230770\n",
+ "Epoch: 2169 avg_cer= 0.019230770\n",
+ "Epoch: 2170 avg_cer= 0.019230770\n",
+ "Epoch: 2171 avg_cer= 0.019230770\n",
+ "Epoch: 2172 avg_cer= 0.019230770\n",
+ "Epoch: 2173 avg_cer= 0.019230770\n",
+ "Epoch: 2174 avg_cer= 0.019230770\n",
+ "Epoch: 2175 avg_cer= 0.019230770\n",
+ "Epoch: 2176 avg_cer= 0.019230770\n",
+ "Epoch: 2177 avg_cer= 0.019230770\n",
+ "Epoch: 2178 avg_cer= 0.019230770\n",
+ "Epoch: 2179 avg_cer= 0.019230770\n",
+ "Epoch: 2180 avg_cer= 0.019230770\n",
+ "Epoch: 2181 avg_cer= 0.019230770\n",
+ "Epoch: 2182 avg_cer= 0.019230770\n",
+ "Epoch: 2183 avg_cer= 0.019230770\n",
+ "Epoch: 2184 avg_cer= 0.019230770\n",
+ "Epoch: 2185 avg_cer= 0.019230770\n",
+ "Epoch: 2186 avg_cer= 0.019230770\n",
+ "Epoch: 2187 avg_cer= 0.019230770\n",
+ "Epoch: 2188 avg_cer= 0.019230770\n",
+ "Epoch: 2189 avg_cer= 0.019230770\n",
+ "Epoch: 2190 avg_cer= 0.019230770\n",
+ "Epoch: 2191 avg_cer= 0.019230770\n",
+ "Epoch: 2192 avg_cer= 0.019230770\n",
+ "Epoch: 2193 avg_cer= 0.019230770\n",
+ "Epoch: 2194 avg_cer= 0.019230770\n",
+ "Epoch: 2195 avg_cer= 0.019230770\n",
+ "Epoch: 2196 avg_cer= 0.019230770\n",
+ "Epoch: 2197 avg_cer= 0.019230770\n",
+ "Epoch: 2198 avg_cer= 0.019230770\n",
+ "Epoch: 2199 avg_cer= 0.019230770\n",
+ "Epoch: 2200 avg_cer= 0.019230770\n",
+ "Epoch: 2201 avg_cer= 0.019230770\n",
+ "Epoch: 2202 avg_cer= 0.019230770\n",
+ "Epoch: 2203 avg_cer= 0.019230770\n",
+ "Epoch: 2204 avg_cer= 0.019230770\n",
+ "Epoch: 2205 avg_cer= 0.019230770\n",
+ "Epoch: 2206 avg_cer= 0.019230770\n",
+ "Epoch: 2207 avg_cer= 0.019230770\n",
+ "Epoch: 2208 avg_cer= 0.019230770\n",
+ "Epoch: 2209 avg_cer= 0.019230770\n",
+ "Epoch: 2210 avg_cer= 0.019230770\n",
+ "Epoch: 2211 avg_cer= 0.019230770\n",
+ "Epoch: 2212 avg_cer= 0.019230770\n",
+ "Epoch: 2213 avg_cer= 0.019230770\n",
+ "Epoch: 2214 avg_cer= 0.019230770\n",
+ "Epoch: 2215 avg_cer= 0.019230770\n",
+ "Epoch: 2216 avg_cer= 0.019230770\n",
+ "Epoch: 2217 avg_cer= 0.019230770\n",
+ "Epoch: 2218 avg_cer= 0.019230770\n",
+ "Epoch: 2219 avg_cer= 0.019230770\n",
+ "Epoch: 2220 avg_cer= 0.019230770\n",
+ "Epoch: 2221 avg_cer= 0.019230770\n",
+ "Epoch: 2222 avg_cer= 0.019230770\n",
+ "Epoch: 2223 avg_cer= 0.019230770\n",
+ "Epoch: 2224 avg_cer= 0.019230770\n",
+ "Epoch: 2225 avg_cer= 0.019230770\n",
+ "Epoch: 2226 avg_cer= 0.019230770\n",
+ "Epoch: 2227 avg_cer= 0.019230770\n",
+ "Epoch: 2228 avg_cer= 0.019230770\n",
+ "Epoch: 2229 avg_cer= 0.019230770\n",
+ "Epoch: 2230 avg_cer= 0.019230770\n",
+ "Epoch: 2231 avg_cer= 0.019230770\n",
+ "Epoch: 2232 avg_cer= 0.019230770\n",
+ "Epoch: 2233 avg_cer= 0.019230770\n",
+ "Epoch: 2234 avg_cer= 0.019230770\n",
+ "Epoch: 2235 avg_cer= 0.019230770\n",
+ "Epoch: 2236 avg_cer= 0.019230770\n",
+ "Epoch: 2237 avg_cer= 0.019230770\n",
+ "Epoch: 2238 avg_cer= 0.019230770\n",
+ "Epoch: 2239 avg_cer= 0.019230770\n",
+ "Epoch: 2240 avg_cer= 0.019230770\n",
+ "Epoch: 2241 avg_cer= 0.019230770\n",
+ "Epoch: 2242 avg_cer= 0.019230770\n",
+ "Epoch: 2243 avg_cer= 0.019230770\n",
+ "Epoch: 2244 avg_cer= 0.019230770\n",
+ "Epoch: 2245 avg_cer= 0.019230770\n",
+ "Epoch: 2246 avg_cer= 0.019230770\n",
+ "Epoch: 2247 avg_cer= 0.019230770\n",
+ "Epoch: 2248 avg_cer= 0.019230770\n",
+ "Epoch: 2249 avg_cer= 0.019230770\n",
+ "Epoch: 2250 avg_cer= 0.057692308\n",
+ "Epoch: 2251 avg_cer= 0.096153848\n",
+ "Epoch: 2252 avg_cer= 0.173076928\n",
+ "Epoch: 2253 avg_cer= 0.134615391\n",
+ "Epoch: 2254 avg_cer= 0.153846160\n",
+ "Epoch: 2255 avg_cer= 0.173076928\n",
+ "Epoch: 2256 avg_cer= 0.115384616\n",
+ "Epoch: 2257 avg_cer= 0.076923080\n",
+ "Epoch: 2258 avg_cer= 0.096153848\n",
+ "Epoch: 2259 avg_cer= 0.038461540\n",
+ "Epoch: 2260 avg_cer= 0.134615391\n",
+ "Epoch: 2261 avg_cer= 0.115384616\n",
+ "Epoch: 2262 avg_cer= 0.096153848\n",
+ "Epoch: 2263 avg_cer= 0.076923080\n",
+ "Epoch: 2264 avg_cer= 0.096153848\n",
+ "Epoch: 2265 avg_cer= 0.057692308\n",
+ "Epoch: 2266 avg_cer= 0.076923080\n",
+ "Epoch: 2267 avg_cer= 0.076923080\n",
+ "Epoch: 2268 avg_cer= 0.038461540\n",
+ "Epoch: 2269 avg_cer= 0.096153848\n",
+ "Epoch: 2270 avg_cer= 0.057692308\n",
+ "Epoch: 2271 avg_cer= 0.076923080\n",
+ "Epoch: 2272 avg_cer= 0.057692308\n",
+ "Epoch: 2273 avg_cer= 0.057692308\n",
+ "Epoch: 2274 avg_cer= 0.038461540\n",
+ "Epoch: 2275 avg_cer= 0.038461540\n",
+ "Epoch: 2276 avg_cer= 0.038461540\n",
+ "Epoch: 2277 avg_cer= 0.038461540\n",
+ "Epoch: 2278 avg_cer= 0.057692308\n",
+ "Epoch: 2279 avg_cer= 0.057692308\n",
+ "Epoch: 2280 avg_cer= 0.038461540\n",
+ "Epoch: 2281 avg_cer= 0.038461540\n",
+ "Epoch: 2282 avg_cer= 0.038461540\n",
+ "Epoch: 2283 avg_cer= 0.038461540\n",
+ "Epoch: 2284 avg_cer= 0.057692308\n",
+ "Epoch: 2285 avg_cer= 0.057692308\n",
+ "Epoch: 2286 avg_cer= 0.038461540\n",
+ "Epoch: 2287 avg_cer= 0.038461540\n",
+ "Epoch: 2288 avg_cer= 0.038461540\n",
+ "Epoch: 2289 avg_cer= 0.038461540\n",
+ "Epoch: 2290 avg_cer= 0.038461540\n",
+ "Epoch: 2291 avg_cer= 0.038461540\n",
+ "Epoch: 2292 avg_cer= 0.038461540\n",
+ "Epoch: 2293 avg_cer= 0.038461540\n",
+ "Epoch: 2294 avg_cer= 0.038461540\n",
+ "Epoch: 2295 avg_cer= 0.038461540\n",
+ "Epoch: 2296 avg_cer= 0.038461540\n",
+ "Epoch: 2297 avg_cer= 0.038461540\n",
+ "Epoch: 2298 avg_cer= 0.038461540\n",
+ "Epoch: 2299 avg_cer= 0.038461540\n",
+ "Epoch: 2300 avg_cer= 0.038461540\n",
+ "Epoch: 2301 avg_cer= 0.019230770\n",
+ "Epoch: 2302 avg_cer= 0.019230770\n",
+ "Epoch: 2303 avg_cer= 0.057692308\n",
+ "Epoch: 2304 avg_cer= 0.019230770\n",
+ "Epoch: 2305 avg_cer= 0.019230770\n",
+ "Epoch: 2306 avg_cer= 0.019230770\n",
+ "Epoch: 2307 avg_cer= 0.019230770\n",
+ "Epoch: 2308 avg_cer= 0.019230770\n",
+ "Epoch: 2309 avg_cer= 0.019230770\n",
+ "Epoch: 2310 avg_cer= 0.019230770\n",
+ "Epoch: 2311 avg_cer= 0.019230770\n",
+ "Epoch: 2312 avg_cer= 0.019230770\n",
+ "Epoch: 2313 avg_cer= 0.019230770\n",
+ "Epoch: 2314 avg_cer= 0.019230770\n",
+ "Epoch: 2315 avg_cer= 0.019230770\n",
+ "Epoch: 2316 avg_cer= 0.019230770\n",
+ "Epoch: 2317 avg_cer= 0.019230770\n",
+ "Epoch: 2318 avg_cer= 0.019230770\n",
+ "Epoch: 2319 avg_cer= 0.019230770\n",
+ "Epoch: 2320 avg_cer= 0.019230770\n",
+ "Epoch: 2321 avg_cer= 0.019230770\n",
+ "Epoch: 2322 avg_cer= 0.019230770\n",
+ "Epoch: 2323 avg_cer= 0.019230770\n",
+ "Epoch: 2324 avg_cer= 0.019230770\n",
+ "Epoch: 2325 avg_cer= 0.019230770\n",
+ "Epoch: 2326 avg_cer= 0.019230770\n",
+ "Epoch: 2327 avg_cer= 0.019230770\n",
+ "Epoch: 2328 avg_cer= 0.019230770\n",
+ "Epoch: 2329 avg_cer= 0.019230770\n",
+ "Epoch: 2330 avg_cer= 0.019230770\n",
+ "Epoch: 2331 avg_cer= 0.019230770\n",
+ "Epoch: 2332 avg_cer= 0.019230770\n",
+ "Epoch: 2333 avg_cer= 0.019230770\n",
+ "Epoch: 2334 avg_cer= 0.019230770\n",
+ "Epoch: 2335 avg_cer= 0.019230770\n",
+ "Epoch: 2336 avg_cer= 0.019230770\n",
+ "Epoch: 2337 avg_cer= 0.019230770\n",
+ "Epoch: 2338 avg_cer= 0.019230770\n",
+ "Epoch: 2339 avg_cer= 0.019230770\n",
+ "Epoch: 2340 avg_cer= 0.019230770\n",
+ "Epoch: 2341 avg_cer= 0.019230770\n",
+ "Epoch: 2342 avg_cer= 0.019230770\n",
+ "Epoch: 2343 avg_cer= 0.019230770\n",
+ "Epoch: 2344 avg_cer= 0.019230770\n",
+ "Epoch: 2345 avg_cer= 0.019230770\n",
+ "Epoch: 2346 avg_cer= 0.019230770\n",
+ "Epoch: 2347 avg_cer= 0.019230770\n",
+ "Epoch: 2348 avg_cer= 0.019230770\n",
+ "Epoch: 2349 avg_cer= 0.019230770\n",
+ "Epoch: 2350 avg_cer= 0.019230770\n",
+ "Epoch: 2351 avg_cer= 0.019230770\n",
+ "Epoch: 2352 avg_cer= 0.019230770\n",
+ "Epoch: 2353 avg_cer= 0.019230770\n",
+ "Epoch: 2354 avg_cer= 0.019230770\n",
+ "Epoch: 2355 avg_cer= 0.019230770\n",
+ "Epoch: 2356 avg_cer= 0.019230770\n",
+ "Epoch: 2357 avg_cer= 0.019230770\n",
+ "Epoch: 2358 avg_cer= 0.019230770\n",
+ "Epoch: 2359 avg_cer= 0.019230770\n",
+ "Epoch: 2360 avg_cer= 0.019230770\n",
+ "Epoch: 2361 avg_cer= 0.019230770\n",
+ "Epoch: 2362 avg_cer= 0.019230770\n",
+ "Epoch: 2363 avg_cer= 0.019230770\n",
+ "Epoch: 2364 avg_cer= 0.019230770\n",
+ "Epoch: 2365 avg_cer= 0.019230770\n",
+ "Epoch: 2366 avg_cer= 0.019230770\n",
+ "Epoch: 2367 avg_cer= 0.019230770\n",
+ "Epoch: 2368 avg_cer= 0.019230770\n",
+ "Epoch: 2369 avg_cer= 0.019230770\n",
+ "Epoch: 2370 avg_cer= 0.019230770\n",
+ "Epoch: 2371 avg_cer= 0.019230770\n",
+ "Epoch: 2372 avg_cer= 0.019230770\n",
+ "Epoch: 2373 avg_cer= 0.019230770\n",
+ "Epoch: 2374 avg_cer= 0.019230770\n",
+ "Epoch: 2375 avg_cer= 0.019230770\n",
+ "Epoch: 2376 avg_cer= 0.019230770\n",
+ "Epoch: 2377 avg_cer= 0.019230770\n",
+ "Epoch: 2378 avg_cer= 0.019230770\n",
+ "Epoch: 2379 avg_cer= 0.019230770\n",
+ "Epoch: 2380 avg_cer= 0.019230770\n",
+ "Epoch: 2381 avg_cer= 0.019230770\n",
+ "Epoch: 2382 avg_cer= 0.019230770\n",
+ "Epoch: 2383 avg_cer= 0.019230770\n",
+ "Epoch: 2384 avg_cer= 0.019230770\n",
+ "Epoch: 2385 avg_cer= 0.019230770\n",
+ "Epoch: 2386 avg_cer= 0.019230770\n",
+ "Epoch: 2387 avg_cer= 0.019230770\n",
+ "Epoch: 2388 avg_cer= 0.019230770\n",
+ "Epoch: 2389 avg_cer= 0.019230770\n",
+ "Epoch: 2390 avg_cer= 0.019230770\n",
+ "Epoch: 2391 avg_cer= 0.019230770\n",
+ "Epoch: 2392 avg_cer= 0.019230770\n",
+ "Epoch: 2393 avg_cer= 0.019230770\n",
+ "Epoch: 2394 avg_cer= 0.019230770\n",
+ "Epoch: 2395 avg_cer= 0.019230770\n",
+ "Epoch: 2396 avg_cer= 0.019230770\n",
+ "Epoch: 2397 avg_cer= 0.019230770\n",
+ "Epoch: 2398 avg_cer= 0.019230770\n",
+ "Epoch: 2399 avg_cer= 0.019230770\n",
+ "Epoch: 2400 avg_cer= 0.019230770\n",
+ "Epoch: 2401 avg_cer= 0.019230770\n",
+ "Epoch: 2402 avg_cer= 0.019230770\n",
+ "Epoch: 2403 avg_cer= 0.019230770\n",
+ "Epoch: 2404 avg_cer= 0.019230770\n",
+ "Epoch: 2405 avg_cer= 0.019230770\n",
+ "Epoch: 2406 avg_cer= 0.019230770\n",
+ "Epoch: 2407 avg_cer= 0.019230770\n",
+ "Epoch: 2408 avg_cer= 0.019230770\n",
+ "Epoch: 2409 avg_cer= 0.019230770\n",
+ "Epoch: 2410 avg_cer= 0.019230770\n",
+ "Epoch: 2411 avg_cer= 0.019230770\n",
+ "Epoch: 2412 avg_cer= 0.019230770\n",
+ "Epoch: 2413 avg_cer= 0.019230770\n",
+ "Epoch: 2414 avg_cer= 0.019230770\n",
+ "Epoch: 2415 avg_cer= 0.019230770\n",
+ "Epoch: 2416 avg_cer= 0.019230770\n",
+ "Epoch: 2417 avg_cer= 0.019230770\n",
+ "Epoch: 2418 avg_cer= 0.019230770\n",
+ "Epoch: 2419 avg_cer= 0.019230770\n",
+ "Epoch: 2420 avg_cer= 0.019230770\n",
+ "Epoch: 2421 avg_cer= 0.019230770\n",
+ "Epoch: 2422 avg_cer= 0.019230770\n",
+ "Epoch: 2423 avg_cer= 0.019230770\n",
+ "Epoch: 2424 avg_cer= 0.019230770\n",
+ "Epoch: 2425 avg_cer= 0.019230770\n",
+ "Epoch: 2426 avg_cer= 0.019230770\n",
+ "Epoch: 2427 avg_cer= 0.019230770\n",
+ "Epoch: 2428 avg_cer= 0.019230770\n",
+ "Epoch: 2429 avg_cer= 0.019230770\n",
+ "Epoch: 2430 avg_cer= 0.019230770\n",
+ "Epoch: 2431 avg_cer= 0.057692308\n",
+ "Epoch: 2432 avg_cer= 0.019230770\n",
+ "Epoch: 2433 avg_cer= 0.019230770\n",
+ "Epoch: 2434 avg_cer= 0.019230770\n",
+ "Epoch: 2435 avg_cer= 0.019230770\n",
+ "Epoch: 2436 avg_cer= 0.019230770\n",
+ "Epoch: 2437 avg_cer= 0.019230770\n",
+ "Epoch: 2438 avg_cer= 0.019230770\n",
+ "Epoch: 2439 avg_cer= 0.019230770\n",
+ "Epoch: 2440 avg_cer= 0.019230770\n",
+ "Epoch: 2441 avg_cer= 0.019230770\n",
+ "Epoch: 2442 avg_cer= 0.019230770\n",
+ "Epoch: 2443 avg_cer= 0.019230770\n",
+ "Epoch: 2444 avg_cer= 0.019230770\n",
+ "Epoch: 2445 avg_cer= 0.019230770\n",
+ "Epoch: 2446 avg_cer= 0.019230770\n",
+ "Epoch: 2447 avg_cer= 0.019230770\n",
+ "Epoch: 2448 avg_cer= 0.019230770\n",
+ "Epoch: 2449 avg_cer= 0.019230770\n",
+ "Epoch: 2450 avg_cer= 0.019230770\n",
+ "Epoch: 2451 avg_cer= 0.019230770\n",
+ "Epoch: 2452 avg_cer= 0.019230770\n",
+ "Epoch: 2453 avg_cer= 0.019230770\n",
+ "Epoch: 2454 avg_cer= 0.019230770\n",
+ "Epoch: 2455 avg_cer= 0.019230770\n",
+ "Epoch: 2456 avg_cer= 0.019230770\n",
+ "Epoch: 2457 avg_cer= 0.019230770\n",
+ "Epoch: 2458 avg_cer= 0.019230770\n",
+ "Epoch: 2459 avg_cer= 0.019230770\n",
+ "Epoch: 2460 avg_cer= 0.019230770\n",
+ "Epoch: 2461 avg_cer= 0.019230770\n",
+ "Epoch: 2462 avg_cer= 0.019230770\n",
+ "Epoch: 2463 avg_cer= 0.019230770\n",
+ "Epoch: 2464 avg_cer= 0.019230770\n",
+ "Epoch: 2465 avg_cer= 0.019230770\n",
+ "Epoch: 2466 avg_cer= 0.019230770\n",
+ "Epoch: 2467 avg_cer= 0.019230770\n",
+ "Epoch: 2468 avg_cer= 0.019230770\n",
+ "Epoch: 2469 avg_cer= 0.019230770\n",
+ "Epoch: 2470 avg_cer= 0.019230770\n",
+ "Epoch: 2471 avg_cer= 0.019230770\n",
+ "Epoch: 2472 avg_cer= 0.019230770\n",
+ "Epoch: 2473 avg_cer= 0.019230770\n",
+ "Epoch: 2474 avg_cer= 0.019230770\n",
+ "Epoch: 2475 avg_cer= 0.019230770\n",
+ "Epoch: 2476 avg_cer= 0.019230770\n",
+ "Epoch: 2477 avg_cer= 0.019230770\n",
+ "Epoch: 2478 avg_cer= 0.019230770\n",
+ "Epoch: 2479 avg_cer= 0.019230770\n",
+ "Epoch: 2480 avg_cer= 0.019230770\n",
+ "Epoch: 2481 avg_cer= 0.019230770\n",
+ "Epoch: 2482 avg_cer= 0.019230770\n",
+ "Epoch: 2483 avg_cer= 0.019230770\n",
+ "Epoch: 2484 avg_cer= 0.019230770\n",
+ "Epoch: 2485 avg_cer= 0.019230770\n",
+ "Epoch: 2486 avg_cer= 0.019230770\n",
+ "Epoch: 2487 avg_cer= 0.019230770\n",
+ "Epoch: 2488 avg_cer= 0.019230770\n",
+ "Epoch: 2489 avg_cer= 0.019230770\n",
+ "Epoch: 2490 avg_cer= 0.019230770\n",
+ "Epoch: 2491 avg_cer= 0.019230770\n",
+ "Epoch: 2492 avg_cer= 0.019230770\n",
+ "Epoch: 2493 avg_cer= 0.019230770\n",
+ "Epoch: 2494 avg_cer= 0.019230770\n",
+ "Epoch: 2495 avg_cer= 0.019230770\n",
+ "Epoch: 2496 avg_cer= 0.019230770\n",
+ "Epoch: 2497 avg_cer= 0.019230770\n",
+ "Epoch: 2498 avg_cer= 0.019230770\n",
+ "Epoch: 2499 avg_cer= 0.019230770\n",
+ "Epoch: 2500 avg_cer= 0.019230770\n",
+ "Epoch: 2501 avg_cer= 0.019230770\n",
+ "Epoch: 2502 avg_cer= 0.019230770\n",
+ "Epoch: 2503 avg_cer= 0.019230770\n",
+ "Epoch: 2504 avg_cer= 0.019230770\n",
+ "Epoch: 2505 avg_cer= 0.019230770\n",
+ "Epoch: 2506 avg_cer= 0.019230770\n",
+ "Epoch: 2507 avg_cer= 0.019230770\n",
+ "Epoch: 2508 avg_cer= 0.019230770\n",
+ "Epoch: 2509 avg_cer= 0.019230770\n",
+ "Epoch: 2510 avg_cer= 0.019230770\n",
+ "Epoch: 2511 avg_cer= 0.019230770\n",
+ "Epoch: 2512 avg_cer= 0.019230770\n",
+ "Epoch: 2513 avg_cer= 0.019230770\n",
+ "Epoch: 2514 avg_cer= 0.019230770\n",
+ "Epoch: 2515 avg_cer= 0.019230770\n",
+ "Epoch: 2516 avg_cer= 0.019230770\n",
+ "Epoch: 2517 avg_cer= 0.019230770\n",
+ "Epoch: 2518 avg_cer= 0.019230770\n",
+ "Epoch: 2519 avg_cer= 0.019230770\n",
+ "Epoch: 2520 avg_cer= 0.019230770\n",
+ "Epoch: 2521 avg_cer= 0.019230770\n",
+ "Epoch: 2522 avg_cer= 0.019230770\n",
+ "Epoch: 2523 avg_cer= 0.019230770\n",
+ "Epoch: 2524 avg_cer= 0.019230770\n",
+ "Epoch: 2525 avg_cer= 0.019230770\n",
+ "Epoch: 2526 avg_cer= 0.019230770\n",
+ "Epoch: 2527 avg_cer= 0.019230770\n",
+ "Epoch: 2528 avg_cer= 0.019230770\n",
+ "Epoch: 2529 avg_cer= 0.019230770\n",
+ "Epoch: 2530 avg_cer= 0.019230770\n",
+ "Epoch: 2531 avg_cer= 0.019230770\n",
+ "Epoch: 2532 avg_cer= 0.019230770\n",
+ "Epoch: 2533 avg_cer= 0.019230770\n",
+ "Epoch: 2534 avg_cer= 0.019230770\n",
+ "Epoch: 2535 avg_cer= 0.019230770\n",
+ "Epoch: 2536 avg_cer= 0.019230770\n",
+ "Epoch: 2537 avg_cer= 0.019230770\n",
+ "Epoch: 2538 avg_cer= 0.019230770\n",
+ "Epoch: 2539 avg_cer= 0.019230770\n",
+ "Epoch: 2540 avg_cer= 0.019230770\n",
+ "Epoch: 2541 avg_cer= 0.019230770\n",
+ "Epoch: 2542 avg_cer= 0.019230770\n",
+ "Epoch: 2543 avg_cer= 0.019230770\n",
+ "Epoch: 2544 avg_cer= 0.019230770\n",
+ "Epoch: 2545 avg_cer= 0.019230770\n",
+ "Epoch: 2546 avg_cer= 0.019230770\n",
+ "Epoch: 2547 avg_cer= 0.019230770\n",
+ "Epoch: 2548 avg_cer= 0.019230770\n",
+ "Epoch: 2549 avg_cer= 0.019230770\n",
+ "Epoch: 2550 avg_cer= 0.019230770\n",
+ "Epoch: 2551 avg_cer= 0.019230770\n",
+ "Epoch: 2552 avg_cer= 0.019230770\n",
+ "Epoch: 2553 avg_cer= 0.019230770\n",
+ "Epoch: 2554 avg_cer= 0.019230770\n",
+ "Epoch: 2555 avg_cer= 0.019230770\n",
+ "Epoch: 2556 avg_cer= 0.019230770\n",
+ "Epoch: 2557 avg_cer= 0.019230770\n",
+ "Epoch: 2558 avg_cer= 0.019230770\n",
+ "Epoch: 2559 avg_cer= 0.019230770\n",
+ "Epoch: 2560 avg_cer= 0.019230770\n",
+ "Epoch: 2561 avg_cer= 0.019230770\n",
+ "Epoch: 2562 avg_cer= 0.019230770\n",
+ "Epoch: 2563 avg_cer= 0.019230770\n",
+ "Epoch: 2564 avg_cer= 0.019230770\n",
+ "Epoch: 2565 avg_cer= 0.019230770\n",
+ "Epoch: 2566 avg_cer= 0.019230770\n",
+ "Epoch: 2567 avg_cer= 0.019230770\n",
+ "Epoch: 2568 avg_cer= 0.019230770\n",
+ "Epoch: 2569 avg_cer= 0.019230770\n",
+ "Epoch: 2570 avg_cer= 0.019230770\n",
+ "Epoch: 2571 avg_cer= 0.019230770\n",
+ "Epoch: 2572 avg_cer= 0.019230770\n",
+ "Epoch: 2573 avg_cer= 0.019230770\n",
+ "Epoch: 2574 avg_cer= 0.019230770\n",
+ "Epoch: 2575 avg_cer= 0.019230770\n",
+ "Epoch: 2576 avg_cer= 0.019230770\n",
+ "Epoch: 2577 avg_cer= 0.019230770\n",
+ "Epoch: 2578 avg_cer= 0.019230770\n",
+ "Epoch: 2579 avg_cer= 0.019230770\n",
+ "Epoch: 2580 avg_cer= 0.019230770\n",
+ "Epoch: 2581 avg_cer= 0.019230770\n",
+ "Epoch: 2582 avg_cer= 0.019230770\n",
+ "Epoch: 2583 avg_cer= 0.019230770\n",
+ "Epoch: 2584 avg_cer= 0.019230770\n",
+ "Epoch: 2585 avg_cer= 0.019230770\n",
+ "Epoch: 2586 avg_cer= 0.019230770\n",
+ "Epoch: 2587 avg_cer= 0.019230770\n",
+ "Epoch: 2588 avg_cer= 0.019230770\n",
+ "Epoch: 2589 avg_cer= 0.019230770\n",
+ "Epoch: 2590 avg_cer= 0.019230770\n",
+ "Epoch: 2591 avg_cer= 0.019230770\n",
+ "Epoch: 2592 avg_cer= 0.019230770\n",
+ "Epoch: 2593 avg_cer= 0.019230770\n",
+ "Epoch: 2594 avg_cer= 0.019230770\n",
+ "Epoch: 2595 avg_cer= 0.019230770\n",
+ "Epoch: 2596 avg_cer= 0.019230770\n",
+ "Epoch: 2597 avg_cer= 0.019230770\n",
+ "Epoch: 2598 avg_cer= 0.019230770\n",
+ "Epoch: 2599 avg_cer= 0.019230770\n",
+ "Epoch: 2600 avg_cer= 0.019230770\n",
+ "Epoch: 2601 avg_cer= 0.019230770\n",
+ "Epoch: 2602 avg_cer= 0.019230770\n",
+ "Epoch: 2603 avg_cer= 0.019230770\n",
+ "Epoch: 2604 avg_cer= 0.019230770\n",
+ "Epoch: 2605 avg_cer= 0.019230770\n",
+ "Epoch: 2606 avg_cer= 0.019230770\n",
+ "Epoch: 2607 avg_cer= 0.019230770\n",
+ "Epoch: 2608 avg_cer= 0.019230770\n",
+ "Epoch: 2609 avg_cer= 0.019230770\n",
+ "Epoch: 2610 avg_cer= 0.019230770\n",
+ "Epoch: 2611 avg_cer= 0.019230770\n",
+ "Epoch: 2612 avg_cer= 0.019230770\n",
+ "Epoch: 2613 avg_cer= 0.019230770\n",
+ "Epoch: 2614 avg_cer= 0.019230770\n",
+ "Epoch: 2615 avg_cer= 0.019230770\n",
+ "Epoch: 2616 avg_cer= 0.019230770\n",
+ "Epoch: 2617 avg_cer= 0.019230770\n",
+ "Epoch: 2618 avg_cer= 0.019230770\n",
+ "Epoch: 2619 avg_cer= 0.019230770\n",
+ "Epoch: 2620 avg_cer= 0.019230770\n",
+ "Epoch: 2621 avg_cer= 0.019230770\n",
+ "Epoch: 2622 avg_cer= 0.019230770\n",
+ "Epoch: 2623 avg_cer= 0.019230770\n",
+ "Epoch: 2624 avg_cer= 0.019230770\n",
+ "Epoch: 2625 avg_cer= 0.019230770\n",
+ "Epoch: 2626 avg_cer= 0.019230770\n",
+ "Epoch: 2627 avg_cer= 0.019230770\n",
+ "Epoch: 2628 avg_cer= 0.019230770\n",
+ "Epoch: 2629 avg_cer= 0.019230770\n",
+ "Epoch: 2630 avg_cer= 0.019230770\n",
+ "Epoch: 2631 avg_cer= 0.019230770\n",
+ "Epoch: 2632 avg_cer= 0.019230770\n",
+ "Epoch: 2633 avg_cer= 0.019230770\n",
+ "Epoch: 2634 avg_cer= 0.019230770\n",
+ "Epoch: 2635 avg_cer= 0.019230770\n",
+ "Epoch: 2636 avg_cer= 0.019230770\n",
+ "Epoch: 2637 avg_cer= 0.019230770\n",
+ "Epoch: 2638 avg_cer= 0.019230770\n",
+ "Epoch: 2639 avg_cer= 0.019230770\n",
+ "Epoch: 2640 avg_cer= 0.019230770\n",
+ "Epoch: 2641 avg_cer= 0.019230770\n",
+ "Epoch: 2642 avg_cer= 0.019230770\n",
+ "Epoch: 2643 avg_cer= 0.019230770\n",
+ "Epoch: 2644 avg_cer= 0.019230770\n",
+ "Epoch: 2645 avg_cer= 0.019230770\n",
+ "Epoch: 2646 avg_cer= 0.019230770\n",
+ "Epoch: 2647 avg_cer= 0.019230770\n",
+ "Epoch: 2648 avg_cer= 0.019230770\n",
+ "Epoch: 2649 avg_cer= 0.019230770\n",
+ "Epoch: 2650 avg_cer= 0.019230770\n",
+ "Epoch: 2651 avg_cer= 0.019230770\n",
+ "Epoch: 2652 avg_cer= 0.019230770\n",
+ "Epoch: 2653 avg_cer= 0.019230770\n",
+ "Epoch: 2654 avg_cer= 0.019230770\n",
+ "Epoch: 2655 avg_cer= 0.019230770\n",
+ "Epoch: 2656 avg_cer= 0.019230770\n",
+ "Epoch: 2657 avg_cer= 0.019230770\n",
+ "Epoch: 2658 avg_cer= 0.019230770\n",
+ "Epoch: 2659 avg_cer= 0.019230770\n",
+ "Epoch: 2660 avg_cer= 0.019230770\n",
+ "Epoch: 2661 avg_cer= 0.019230770\n",
+ "Epoch: 2662 avg_cer= 0.019230770\n",
+ "Epoch: 2663 avg_cer= 0.019230770\n",
+ "Epoch: 2664 avg_cer= 0.019230770\n",
+ "Epoch: 2665 avg_cer= 0.019230770\n",
+ "Epoch: 2666 avg_cer= 0.019230770\n",
+ "Epoch: 2667 avg_cer= 0.019230770\n",
+ "Epoch: 2668 avg_cer= 0.019230770\n",
+ "Epoch: 2669 avg_cer= 0.019230770\n",
+ "Epoch: 2670 avg_cer= 0.019230770\n",
+ "Epoch: 2671 avg_cer= 0.019230770\n",
+ "Epoch: 2672 avg_cer= 0.019230770\n",
+ "Epoch: 2673 avg_cer= 0.019230770\n",
+ "Epoch: 2674 avg_cer= 0.019230770\n",
+ "Epoch: 2675 avg_cer= 0.019230770\n",
+ "Epoch: 2676 avg_cer= 0.019230770\n",
+ "Epoch: 2677 avg_cer= 0.019230770\n",
+ "Epoch: 2678 avg_cer= 0.019230770\n",
+ "Epoch: 2679 avg_cer= 0.019230770\n",
+ "Epoch: 2680 avg_cer= 0.019230770\n",
+ "Epoch: 2681 avg_cer= 0.019230770\n",
+ "Epoch: 2682 avg_cer= 0.019230770\n",
+ "Epoch: 2683 avg_cer= 0.019230770\n",
+ "Epoch: 2684 avg_cer= 0.019230770\n",
+ "Epoch: 2685 avg_cer= 0.019230770\n",
+ "Epoch: 2686 avg_cer= 0.019230770\n",
+ "Epoch: 2687 avg_cer= 0.019230770\n",
+ "Epoch: 2688 avg_cer= 0.019230770\n",
+ "Epoch: 2689 avg_cer= 0.019230770\n",
+ "Epoch: 2690 avg_cer= 0.019230770\n",
+ "Epoch: 2691 avg_cer= 0.019230770\n",
+ "Epoch: 2692 avg_cer= 0.019230770\n",
+ "Epoch: 2693 avg_cer= 0.019230770\n",
+ "Epoch: 2694 avg_cer= 0.019230770\n",
+ "Epoch: 2695 avg_cer= 0.019230770\n",
+ "Epoch: 2696 avg_cer= 0.019230770\n",
+ "Epoch: 2697 avg_cer= 0.019230770\n",
+ "Epoch: 2698 avg_cer= 0.019230770\n",
+ "Epoch: 2699 avg_cer= 0.019230770\n",
+ "Epoch: 2700 avg_cer= 0.019230770\n",
+ "Epoch: 2701 avg_cer= 0.019230770\n",
+ "Epoch: 2702 avg_cer= 0.019230770\n",
+ "Epoch: 2703 avg_cer= 0.019230770\n",
+ "Epoch: 2704 avg_cer= 0.019230770\n",
+ "Epoch: 2705 avg_cer= 0.019230770\n",
+ "Epoch: 2706 avg_cer= 0.019230770\n",
+ "Epoch: 2707 avg_cer= 0.019230770\n",
+ "Epoch: 2708 avg_cer= 0.019230770\n",
+ "Epoch: 2709 avg_cer= 0.019230770\n",
+ "Epoch: 2710 avg_cer= 0.019230770\n",
+ "Epoch: 2711 avg_cer= 0.019230770\n",
+ "Epoch: 2712 avg_cer= 0.019230770\n",
+ "Epoch: 2713 avg_cer= 0.019230770\n",
+ "Epoch: 2714 avg_cer= 0.019230770\n",
+ "Epoch: 2715 avg_cer= 0.019230770\n",
+ "Epoch: 2716 avg_cer= 0.019230770\n",
+ "Epoch: 2717 avg_cer= 0.019230770\n",
+ "Epoch: 2718 avg_cer= 0.019230770\n",
+ "Epoch: 2719 avg_cer= 0.019230770\n",
+ "Epoch: 2720 avg_cer= 0.019230770\n",
+ "Epoch: 2721 avg_cer= 0.019230770\n",
+ "Epoch: 2722 avg_cer= 0.019230770\n",
+ "Epoch: 2723 avg_cer= 0.019230770\n",
+ "Epoch: 2724 avg_cer= 0.019230770\n",
+ "Epoch: 2725 avg_cer= 0.019230770\n",
+ "Epoch: 2726 avg_cer= 0.019230770\n",
+ "Epoch: 2727 avg_cer= 0.019230770\n",
+ "Epoch: 2728 avg_cer= 0.019230770\n",
+ "Epoch: 2729 avg_cer= 0.019230770\n",
+ "Epoch: 2730 avg_cer= 0.019230770\n",
+ "Epoch: 2731 avg_cer= 0.019230770\n",
+ "Epoch: 2732 avg_cer= 0.019230770\n",
+ "Epoch: 2733 avg_cer= 0.019230770\n",
+ "Epoch: 2734 avg_cer= 0.019230770\n",
+ "Epoch: 2735 avg_cer= 0.019230770\n",
+ "Epoch: 2736 avg_cer= 0.019230770\n",
+ "Epoch: 2737 avg_cer= 0.019230770\n",
+ "Epoch: 2738 avg_cer= 0.019230770\n",
+ "Epoch: 2739 avg_cer= 0.019230770\n",
+ "Epoch: 2740 avg_cer= 0.019230770\n",
+ "Epoch: 2741 avg_cer= 0.019230770\n",
+ "Epoch: 2742 avg_cer= 0.019230770\n",
+ "Epoch: 2743 avg_cer= 0.019230770\n",
+ "Epoch: 2744 avg_cer= 0.019230770\n",
+ "Epoch: 2745 avg_cer= 0.019230770\n",
+ "Epoch: 2746 avg_cer= 0.019230770\n",
+ "Epoch: 2747 avg_cer= 0.019230770\n",
+ "Epoch: 2748 avg_cer= 0.019230770\n",
+ "Epoch: 2749 avg_cer= 0.019230770\n",
+ "Epoch: 2750 avg_cer= 0.019230770\n",
+ "Epoch: 2751 avg_cer= 0.019230770\n",
+ "Epoch: 2752 avg_cer= 0.019230770\n",
+ "Epoch: 2753 avg_cer= 0.019230770\n",
+ "Epoch: 2754 avg_cer= 0.019230770\n",
+ "Epoch: 2755 avg_cer= 0.019230770\n",
+ "Epoch: 2756 avg_cer= 0.019230770\n",
+ "Epoch: 2757 avg_cer= 0.019230770\n",
+ "Epoch: 2758 avg_cer= 0.019230770\n",
+ "Epoch: 2759 avg_cer= 0.019230770\n",
+ "Epoch: 2760 avg_cer= 0.019230770\n",
+ "Epoch: 2761 avg_cer= 0.019230770\n",
+ "Epoch: 2762 avg_cer= 0.019230770\n",
+ "Epoch: 2763 avg_cer= 0.019230770\n",
+ "Epoch: 2764 avg_cer= 0.019230770\n",
+ "Epoch: 2765 avg_cer= 0.019230770\n",
+ "Epoch: 2766 avg_cer= 0.019230770\n",
+ "Epoch: 2767 avg_cer= 0.019230770\n",
+ "Epoch: 2768 avg_cer= 0.019230770\n",
+ "Epoch: 2769 avg_cer= 0.019230770\n",
+ "Epoch: 2770 avg_cer= 0.019230770\n",
+ "Epoch: 2771 avg_cer= 0.019230770\n",
+ "Epoch: 2772 avg_cer= 0.019230770\n",
+ "Epoch: 2773 avg_cer= 0.019230770\n",
+ "Epoch: 2774 avg_cer= 0.019230770\n",
+ "Epoch: 2775 avg_cer= 0.019230770\n",
+ "Epoch: 2776 avg_cer= 0.019230770\n",
+ "Epoch: 2777 avg_cer= 0.019230770\n",
+ "Epoch: 2778 avg_cer= 0.019230770\n",
+ "Epoch: 2779 avg_cer= 0.019230770\n",
+ "Epoch: 2780 avg_cer= 0.019230770\n",
+ "Epoch: 2781 avg_cer= 0.019230770\n",
+ "Epoch: 2782 avg_cer= 0.019230770\n",
+ "Epoch: 2783 avg_cer= 0.019230770\n",
+ "Epoch: 2784 avg_cer= 0.019230770\n",
+ "Epoch: 2785 avg_cer= 0.019230770\n",
+ "Epoch: 2786 avg_cer= 0.019230770\n",
+ "Epoch: 2787 avg_cer= 0.019230770\n",
+ "Epoch: 2788 avg_cer= 0.019230770\n",
+ "Epoch: 2789 avg_cer= 0.019230770\n",
+ "Epoch: 2790 avg_cer= 0.019230770\n",
+ "Epoch: 2791 avg_cer= 0.019230770\n",
+ "Epoch: 2792 avg_cer= 0.019230770\n",
+ "Epoch: 2793 avg_cer= 0.019230770\n",
+ "Epoch: 2794 avg_cer= 0.019230770\n",
+ "Epoch: 2795 avg_cer= 0.019230770\n",
+ "Epoch: 2796 avg_cer= 0.019230770\n",
+ "Epoch: 2797 avg_cer= 0.019230770\n",
+ "Epoch: 2798 avg_cer= 0.019230770\n",
+ "Epoch: 2799 avg_cer= 0.019230770\n",
+ "Epoch: 2800 avg_cer= 0.019230770\n",
+ "Epoch: 2801 avg_cer= 0.019230770\n",
+ "Epoch: 2802 avg_cer= 0.019230770\n",
+ "Epoch: 2803 avg_cer= 0.019230770\n",
+ "Epoch: 2804 avg_cer= 0.019230770\n",
+ "Epoch: 2805 avg_cer= 0.019230770\n",
+ "Epoch: 2806 avg_cer= 0.019230770\n",
+ "Epoch: 2807 avg_cer= 0.019230770\n",
+ "Epoch: 2808 avg_cer= 0.019230770\n",
+ "Epoch: 2809 avg_cer= 0.019230770\n",
+ "Epoch: 2810 avg_cer= 0.019230770\n",
+ "Epoch: 2811 avg_cer= 0.019230770\n",
+ "Epoch: 2812 avg_cer= 0.019230770\n",
+ "Epoch: 2813 avg_cer= 0.019230770\n",
+ "Epoch: 2814 avg_cer= 0.019230770\n",
+ "Epoch: 2815 avg_cer= 0.019230770\n",
+ "Epoch: 2816 avg_cer= 0.019230770\n",
+ "Epoch: 2817 avg_cer= 0.019230770\n",
+ "Epoch: 2818 avg_cer= 0.019230770\n",
+ "Epoch: 2819 avg_cer= 0.019230770\n",
+ "Epoch: 2820 avg_cer= 0.019230770\n",
+ "Epoch: 2821 avg_cer= 0.019230770\n",
+ "Epoch: 2822 avg_cer= 0.019230770\n",
+ "Epoch: 2823 avg_cer= 0.019230770\n",
+ "Epoch: 2824 avg_cer= 0.019230770\n",
+ "Epoch: 2825 avg_cer= 0.019230770\n",
+ "Epoch: 2826 avg_cer= 0.019230770\n",
+ "Epoch: 2827 avg_cer= 0.019230770\n",
+ "Epoch: 2828 avg_cer= 0.019230770\n",
+ "Epoch: 2829 avg_cer= 0.019230770\n",
+ "Epoch: 2830 avg_cer= 0.019230770\n",
+ "Epoch: 2831 avg_cer= 0.019230770\n",
+ "Epoch: 2832 avg_cer= 0.019230770\n",
+ "Epoch: 2833 avg_cer= 0.019230770\n",
+ "Epoch: 2834 avg_cer= 0.019230770\n",
+ "Epoch: 2835 avg_cer= 0.019230770\n",
+ "Epoch: 2836 avg_cer= 0.019230770\n",
+ "Epoch: 2837 avg_cer= 0.019230770\n",
+ "Epoch: 2838 avg_cer= 0.019230770\n",
+ "Epoch: 2839 avg_cer= 0.019230770\n",
+ "Epoch: 2840 avg_cer= 0.019230770\n",
+ "Epoch: 2841 avg_cer= 0.019230770\n",
+ "Epoch: 2842 avg_cer= 0.019230770\n",
+ "Epoch: 2843 avg_cer= 0.019230770\n",
+ "Epoch: 2844 avg_cer= 0.019230770\n",
+ "Epoch: 2845 avg_cer= 0.019230770\n",
+ "Epoch: 2846 avg_cer= 0.019230770\n",
+ "Epoch: 2847 avg_cer= 0.019230770\n",
+ "Epoch: 2848 avg_cer= 0.019230770\n",
+ "Epoch: 2849 avg_cer= 0.019230770\n",
+ "Epoch: 2850 avg_cer= 0.019230770\n",
+ "Epoch: 2851 avg_cer= 0.019230770\n",
+ "Epoch: 2852 avg_cer= 0.019230770\n",
+ "Epoch: 2853 avg_cer= 0.019230770\n",
+ "Epoch: 2854 avg_cer= 0.019230770\n",
+ "Epoch: 2855 avg_cer= 0.019230770\n",
+ "Epoch: 2856 avg_cer= 0.019230770\n",
+ "Epoch: 2857 avg_cer= 0.019230770\n",
+ "Epoch: 2858 avg_cer= 0.019230770\n",
+ "Epoch: 2859 avg_cer= 0.019230770\n",
+ "Epoch: 2860 avg_cer= 0.019230770\n",
+ "Epoch: 2861 avg_cer= 0.019230770\n",
+ "Epoch: 2862 avg_cer= 0.019230770\n",
+ "Epoch: 2863 avg_cer= 0.019230770\n",
+ "Epoch: 2864 avg_cer= 0.019230770\n",
+ "Epoch: 2865 avg_cer= 0.019230770\n",
+ "Epoch: 2866 avg_cer= 0.019230770\n",
+ "Epoch: 2867 avg_cer= 0.019230770\n",
+ "Epoch: 2868 avg_cer= 0.019230770\n",
+ "Epoch: 2869 avg_cer= 0.019230770\n",
+ "Epoch: 2870 avg_cer= 0.019230770\n",
+ "Epoch: 2871 avg_cer= 0.019230770\n",
+ "Epoch: 2872 avg_cer= 0.019230770\n",
+ "Epoch: 2873 avg_cer= 0.019230770\n",
+ "Epoch: 2874 avg_cer= 0.019230770\n",
+ "Epoch: 2875 avg_cer= 0.019230770\n",
+ "Epoch: 2876 avg_cer= 0.019230770\n",
+ "Epoch: 2877 avg_cer= 0.019230770\n",
+ "Epoch: 2878 avg_cer= 0.019230770\n",
+ "Epoch: 2879 avg_cer= 0.019230770\n",
+ "Epoch: 2880 avg_cer= 0.019230770\n",
+ "Epoch: 2881 avg_cer= 0.019230770\n",
+ "Epoch: 2882 avg_cer= 0.019230770\n",
+ "Epoch: 2883 avg_cer= 0.019230770\n",
+ "Epoch: 2884 avg_cer= 0.019230770\n",
+ "Epoch: 2885 avg_cer= 0.019230770\n",
+ "Epoch: 2886 avg_cer= 0.019230770\n",
+ "Epoch: 2887 avg_cer= 0.019230770\n",
+ "Epoch: 2888 avg_cer= 0.019230770\n",
+ "Epoch: 2889 avg_cer= 0.019230770\n",
+ "Epoch: 2890 avg_cer= 0.019230770\n",
+ "Epoch: 2891 avg_cer= 0.019230770\n",
+ "Epoch: 2892 avg_cer= 0.019230770\n",
+ "Epoch: 2893 avg_cer= 0.019230770\n",
+ "Epoch: 2894 avg_cer= 0.019230770\n",
+ "Epoch: 2895 avg_cer= 0.019230770\n",
+ "Epoch: 2896 avg_cer= 0.019230770\n",
+ "Epoch: 2897 avg_cer= 0.019230770\n",
+ "Epoch: 2898 avg_cer= 0.019230770\n",
+ "Epoch: 2899 avg_cer= 0.019230770\n",
+ "Epoch: 2900 avg_cer= 0.019230770\n",
+ "Epoch: 2901 avg_cer= 0.019230770\n",
+ "Epoch: 2902 avg_cer= 0.019230770\n",
+ "Epoch: 2903 avg_cer= 0.019230770\n",
+ "Epoch: 2904 avg_cer= 0.019230770\n",
+ "Epoch: 2905 avg_cer= 0.019230770\n",
+ "Epoch: 2906 avg_cer= 0.019230770\n",
+ "Epoch: 2907 avg_cer= 0.019230770\n",
+ "Epoch: 2908 avg_cer= 0.019230770\n",
+ "Epoch: 2909 avg_cer= 0.019230770\n",
+ "Epoch: 2910 avg_cer= 0.019230770\n",
+ "Epoch: 2911 avg_cer= 0.019230770\n",
+ "Epoch: 2912 avg_cer= 0.019230770\n",
+ "Epoch: 2913 avg_cer= 0.019230770\n",
+ "Epoch: 2914 avg_cer= 0.019230770\n",
+ "Epoch: 2915 avg_cer= 0.019230770\n",
+ "Epoch: 2916 avg_cer= 0.019230770\n",
+ "Epoch: 2917 avg_cer= 0.019230770\n",
+ "Epoch: 2918 avg_cer= 0.019230770\n",
+ "Epoch: 2919 avg_cer= 0.019230770\n",
+ "Epoch: 2920 avg_cer= 0.019230770\n",
+ "Epoch: 2921 avg_cer= 0.019230770\n",
+ "Epoch: 2922 avg_cer= 0.019230770\n",
+ "Epoch: 2923 avg_cer= 0.019230770\n",
+ "Epoch: 2924 avg_cer= 0.019230770\n",
+ "Epoch: 2925 avg_cer= 0.019230770\n",
+ "Epoch: 2926 avg_cer= 0.019230770\n",
+ "Epoch: 2927 avg_cer= 0.019230770\n",
+ "Epoch: 2928 avg_cer= 0.019230770\n",
+ "Epoch: 2929 avg_cer= 0.019230770\n",
+ "Epoch: 2930 avg_cer= 0.019230770\n",
+ "Epoch: 2931 avg_cer= 0.019230770\n",
+ "Epoch: 2932 avg_cer= 0.019230770\n",
+ "Epoch: 2933 avg_cer= 0.019230770\n",
+ "Epoch: 2934 avg_cer= 0.019230770\n",
+ "Epoch: 2935 avg_cer= 0.019230770\n",
+ "Epoch: 2936 avg_cer= 0.019230770\n",
+ "Epoch: 2937 avg_cer= 0.019230770\n",
+ "Epoch: 2938 avg_cer= 0.019230770\n",
+ "Epoch: 2939 avg_cer= 0.019230770\n",
+ "Epoch: 2940 avg_cer= 0.019230770\n",
+ "Epoch: 2941 avg_cer= 0.019230770\n",
+ "Epoch: 2942 avg_cer= 0.019230770\n",
+ "Epoch: 2943 avg_cer= 0.019230770\n",
+ "Epoch: 2944 avg_cer= 0.019230770\n",
+ "Epoch: 2945 avg_cer= 0.019230770\n",
+ "Epoch: 2946 avg_cer= 0.019230770\n",
+ "Epoch: 2947 avg_cer= 0.019230770\n",
+ "Epoch: 2948 avg_cer= 0.019230770\n",
+ "Epoch: 2949 avg_cer= 0.019230770\n",
+ "Epoch: 2950 avg_cer= 0.019230770\n",
+ "Epoch: 2951 avg_cer= 0.019230770\n",
+ "Epoch: 2952 avg_cer= 0.019230770\n",
+ "Epoch: 2953 avg_cer= 0.019230770\n",
+ "Epoch: 2954 avg_cer= 0.019230770\n",
+ "Epoch: 2955 avg_cer= 0.019230770\n",
+ "Epoch: 2956 avg_cer= 0.019230770\n",
+ "Epoch: 2957 avg_cer= 0.019230770\n",
+ "Epoch: 2958 avg_cer= 0.019230770\n",
+ "Epoch: 2959 avg_cer= 0.019230770\n",
+ "Epoch: 2960 avg_cer= 0.019230770\n",
+ "Epoch: 2961 avg_cer= 0.019230770\n",
+ "Epoch: 2962 avg_cer= 0.019230770\n",
+ "Epoch: 2963 avg_cer= 0.019230770\n",
+ "Epoch: 2964 avg_cer= 0.019230770\n",
+ "Epoch: 2965 avg_cer= 0.019230770\n",
+ "Epoch: 2966 avg_cer= 0.019230770\n",
+ "Epoch: 2967 avg_cer= 0.019230770\n",
+ "Epoch: 2968 avg_cer= 0.019230770\n",
+ "Epoch: 2969 avg_cer= 0.019230770\n",
+ "Epoch: 2970 avg_cer= 0.019230770\n",
+ "Epoch: 2971 avg_cer= 0.019230770\n",
+ "Epoch: 2972 avg_cer= 0.019230770\n",
+ "Epoch: 2973 avg_cer= 0.019230770\n",
+ "Epoch: 2974 avg_cer= 0.019230770\n",
+ "Epoch: 2975 avg_cer= 0.019230770\n",
+ "Epoch: 2976 avg_cer= 0.019230770\n",
+ "Epoch: 2977 avg_cer= 0.019230770\n",
+ "Epoch: 2978 avg_cer= 0.019230770\n",
+ "Epoch: 2979 avg_cer= 0.019230770\n",
+ "Epoch: 2980 avg_cer= 0.019230770\n",
+ "Epoch: 2981 avg_cer= 0.019230770\n",
+ "Epoch: 2982 avg_cer= 0.019230770\n",
+ "Epoch: 2983 avg_cer= 0.019230770\n",
+ "Epoch: 2984 avg_cer= 0.019230770\n",
+ "Epoch: 2985 avg_cer= 0.019230770\n",
+ "Epoch: 2986 avg_cer= 0.019230770\n",
+ "Epoch: 2987 avg_cer= 0.019230770\n",
+ "Epoch: 2988 avg_cer= 0.019230770\n",
+ "Epoch: 2989 avg_cer= 0.019230770\n",
+ "Epoch: 2990 avg_cer= 0.019230770\n",
+ "Epoch: 2991 avg_cer= 0.019230770\n",
+ "Epoch: 2992 avg_cer= 0.019230770\n",
+ "Epoch: 2993 avg_cer= 0.019230770\n",
+ "Epoch: 2994 avg_cer= 0.019230770\n",
+ "Epoch: 2995 avg_cer= 0.019230770\n",
+ "Epoch: 2996 avg_cer= 0.019230770\n",
+ "Epoch: 2997 avg_cer= 0.019230770\n",
+ "Epoch: 2998 avg_cer= 0.019230770\n",
+ "Epoch: 2999 avg_cer= 0.019230770\n",
+ "Epoch: 3000 avg_cer= 0.019230770\n",
+ "Epoch: 3001 avg_cer= 0.019230770\n",
+ "Epoch: 3002 avg_cer= 0.019230770\n",
+ "Epoch: 3003 avg_cer= 0.019230770\n",
+ "Epoch: 3004 avg_cer= 0.019230770\n",
+ "Epoch: 3005 avg_cer= 0.019230770\n",
+ "Epoch: 3006 avg_cer= 0.019230770\n",
+ "Epoch: 3007 avg_cer= 0.019230770\n",
+ "Epoch: 3008 avg_cer= 0.019230770\n",
+ "Epoch: 3009 avg_cer= 0.019230770\n",
+ "Epoch: 3010 avg_cer= 0.019230770\n",
+ "Epoch: 3011 avg_cer= 0.019230770\n",
+ "Epoch: 3012 avg_cer= 0.019230770\n",
+ "Epoch: 3013 avg_cer= 0.019230770\n",
+ "Epoch: 3014 avg_cer= 0.019230770\n",
+ "Epoch: 3015 avg_cer= 0.019230770\n",
+ "Epoch: 3016 avg_cer= 0.019230770\n",
+ "Epoch: 3017 avg_cer= 0.019230770\n",
+ "Epoch: 3018 avg_cer= 0.019230770\n",
+ "Epoch: 3019 avg_cer= 0.019230770\n",
+ "Epoch: 3020 avg_cer= 0.019230770\n",
+ "Epoch: 3021 avg_cer= 0.019230770\n",
+ "Epoch: 3022 avg_cer= 0.019230770\n",
+ "Epoch: 3023 avg_cer= 0.019230770\n",
+ "Epoch: 3024 avg_cer= 0.019230770\n",
+ "Epoch: 3025 avg_cer= 0.019230770\n",
+ "Epoch: 3026 avg_cer= 0.019230770\n",
+ "Epoch: 3027 avg_cer= 0.019230770\n",
+ "Epoch: 3028 avg_cer= 0.019230770\n",
+ "Epoch: 3029 avg_cer= 0.019230770\n",
+ "Epoch: 3030 avg_cer= 0.019230770\n",
+ "Epoch: 3031 avg_cer= 0.019230770\n",
+ "Epoch: 3032 avg_cer= 0.019230770\n",
+ "Epoch: 3033 avg_cer= 0.019230770\n",
+ "Epoch: 3034 avg_cer= 0.019230770\n",
+ "Epoch: 3035 avg_cer= 0.019230770\n",
+ "Epoch: 3036 avg_cer= 0.019230770\n",
+ "Epoch: 3037 avg_cer= 0.019230770\n",
+ "Epoch: 3038 avg_cer= 0.019230770\n",
+ "Epoch: 3039 avg_cer= 0.019230770\n",
+ "Epoch: 3040 avg_cer= 0.019230770\n",
+ "Epoch: 3041 avg_cer= 0.019230770\n",
+ "Epoch: 3042 avg_cer= 0.019230770\n",
+ "Epoch: 3043 avg_cer= 0.019230770\n",
+ "Epoch: 3044 avg_cer= 0.019230770\n",
+ "Epoch: 3045 avg_cer= 0.019230770\n",
+ "Epoch: 3046 avg_cer= 0.019230770\n",
+ "Epoch: 3047 avg_cer= 0.019230770\n",
+ "Epoch: 3048 avg_cer= 0.019230770\n",
+ "Epoch: 3049 avg_cer= 0.019230770\n",
+ "Epoch: 3050 avg_cer= 0.019230770\n",
+ "Epoch: 3051 avg_cer= 0.019230770\n",
+ "Epoch: 3052 avg_cer= 0.019230770\n",
+ "Epoch: 3053 avg_cer= 0.019230770\n",
+ "Epoch: 3054 avg_cer= 0.019230770\n",
+ "Epoch: 3055 avg_cer= 0.019230770\n",
+ "Epoch: 3056 avg_cer= 0.019230770\n",
+ "Epoch: 3057 avg_cer= 0.019230770\n",
+ "Epoch: 3058 avg_cer= 0.019230770\n",
+ "Epoch: 3059 avg_cer= 0.019230770\n",
+ "Epoch: 3060 avg_cer= 0.019230770\n",
+ "Epoch: 3061 avg_cer= 0.019230770\n",
+ "Epoch: 3062 avg_cer= 0.019230770\n",
+ "Epoch: 3063 avg_cer= 0.019230770\n",
+ "Epoch: 3064 avg_cer= 0.019230770\n",
+ "Epoch: 3065 avg_cer= 0.019230770\n",
+ "Epoch: 3066 avg_cer= 0.019230770\n",
+ "Epoch: 3067 avg_cer= 0.019230770\n",
+ "Epoch: 3068 avg_cer= 0.019230770\n",
+ "Epoch: 3069 avg_cer= 0.019230770\n",
+ "Epoch: 3070 avg_cer= 0.019230770\n",
+ "Epoch: 3071 avg_cer= 0.019230770\n",
+ "Epoch: 3072 avg_cer= 0.019230770\n",
+ "Epoch: 3073 avg_cer= 0.019230770\n",
+ "Epoch: 3074 avg_cer= 0.019230770\n",
+ "Epoch: 3075 avg_cer= 0.019230770\n",
+ "Epoch: 3076 avg_cer= 0.019230770\n",
+ "Epoch: 3077 avg_cer= 0.019230770\n",
+ "Epoch: 3078 avg_cer= 0.019230770\n",
+ "Epoch: 3079 avg_cer= 0.019230770\n",
+ "Epoch: 3080 avg_cer= 0.019230770\n",
+ "Epoch: 3081 avg_cer= 0.019230770\n",
+ "Epoch: 3082 avg_cer= 0.019230770\n",
+ "Epoch: 3083 avg_cer= 0.019230770\n",
+ "Epoch: 3084 avg_cer= 0.019230770\n",
+ "Epoch: 3085 avg_cer= 0.019230770\n",
+ "Epoch: 3086 avg_cer= 0.019230770\n",
+ "Epoch: 3087 avg_cer= 0.019230770\n",
+ "Epoch: 3088 avg_cer= 0.019230770\n",
+ "Epoch: 3089 avg_cer= 0.019230770\n",
+ "Epoch: 3090 avg_cer= 0.019230770\n",
+ "Epoch: 3091 avg_cer= 0.019230770\n",
+ "Epoch: 3092 avg_cer= 0.019230770\n",
+ "Epoch: 3093 avg_cer= 0.019230770\n",
+ "Epoch: 3094 avg_cer= 0.019230770\n",
+ "Epoch: 3095 avg_cer= 0.019230770\n",
+ "Epoch: 3096 avg_cer= 0.019230770\n",
+ "Epoch: 3097 avg_cer= 0.019230770\n",
+ "Epoch: 3098 avg_cer= 0.019230770\n",
+ "Epoch: 3099 avg_cer= 0.019230770\n",
+ "Epoch: 3100 avg_cer= 0.019230770\n",
+ "Epoch: 3101 avg_cer= 0.019230770\n",
+ "Epoch: 3102 avg_cer= 0.019230770\n",
+ "Epoch: 3103 avg_cer= 0.019230770\n",
+ "Epoch: 3104 avg_cer= 0.019230770\n",
+ "Epoch: 3105 avg_cer= 0.019230770\n",
+ "Epoch: 3106 avg_cer= 0.019230770\n",
+ "Epoch: 3107 avg_cer= 0.019230770\n",
+ "Epoch: 3108 avg_cer= 0.019230770\n",
+ "Epoch: 3109 avg_cer= 0.019230770\n",
+ "Epoch: 3110 avg_cer= 0.019230770\n",
+ "Epoch: 3111 avg_cer= 0.019230770\n",
+ "Epoch: 3112 avg_cer= 0.019230770\n",
+ "Epoch: 3113 avg_cer= 0.019230770\n",
+ "Epoch: 3114 avg_cer= 0.019230770\n",
+ "Epoch: 3115 avg_cer= 0.019230770\n",
+ "Epoch: 3116 avg_cer= 0.019230770\n",
+ "Epoch: 3117 avg_cer= 0.019230770\n",
+ "Epoch: 3118 avg_cer= 0.019230770\n",
+ "Epoch: 3119 avg_cer= 0.019230770\n",
+ "Epoch: 3120 avg_cer= 0.019230770\n",
+ "Epoch: 3121 avg_cer= 0.019230770\n",
+ "Epoch: 3122 avg_cer= 0.019230770\n",
+ "Epoch: 3123 avg_cer= 0.019230770\n",
+ "Epoch: 3124 avg_cer= 0.019230770\n",
+ "Epoch: 3125 avg_cer= 0.019230770\n",
+ "Epoch: 3126 avg_cer= 0.019230770\n",
+ "Epoch: 3127 avg_cer= 0.019230770\n",
+ "Epoch: 3128 avg_cer= 0.019230770\n",
+ "Epoch: 3129 avg_cer= 0.019230770\n",
+ "Epoch: 3130 avg_cer= 0.019230770\n",
+ "Epoch: 3131 avg_cer= 0.019230770\n",
+ "Epoch: 3132 avg_cer= 0.019230770\n",
+ "Epoch: 3133 avg_cer= 0.019230770\n",
+ "Epoch: 3134 avg_cer= 0.019230770\n",
+ "Epoch: 3135 avg_cer= 0.019230770\n",
+ "Epoch: 3136 avg_cer= 0.019230770\n",
+ "Epoch: 3137 avg_cer= 0.019230770\n",
+ "Epoch: 3138 avg_cer= 0.019230770\n",
+ "Epoch: 3139 avg_cer= 0.019230770\n",
+ "Epoch: 3140 avg_cer= 0.019230770\n",
+ "Epoch: 3141 avg_cer= 0.019230770\n",
+ "Epoch: 3142 avg_cer= 0.019230770\n",
+ "Epoch: 3143 avg_cer= 0.019230770\n",
+ "Epoch: 3144 avg_cer= 0.019230770\n",
+ "Epoch: 3145 avg_cer= 0.019230770\n",
+ "Epoch: 3146 avg_cer= 0.019230770\n",
+ "Epoch: 3147 avg_cer= 0.019230770\n",
+ "Epoch: 3148 avg_cer= 0.019230770\n",
+ "Epoch: 3149 avg_cer= 0.019230770\n",
+ "Epoch: 3150 avg_cer= 0.019230770\n",
+ "Epoch: 3151 avg_cer= 0.019230770\n",
+ "Epoch: 3152 avg_cer= 0.019230770\n",
+ "Epoch: 3153 avg_cer= 0.019230770\n",
+ "Epoch: 3154 avg_cer= 0.019230770\n",
+ "Epoch: 3155 avg_cer= 0.019230770\n",
+ "Epoch: 3156 avg_cer= 0.019230770\n",
+ "Epoch: 3157 avg_cer= 0.019230770\n",
+ "Epoch: 3158 avg_cer= 0.019230770\n",
+ "Epoch: 3159 avg_cer= 0.019230770\n",
+ "Epoch: 3160 avg_cer= 0.019230770\n",
+ "Epoch: 3161 avg_cer= 0.019230770\n",
+ "Epoch: 3162 avg_cer= 0.019230770\n",
+ "Epoch: 3163 avg_cer= 0.019230770\n",
+ "Epoch: 3164 avg_cer= 0.019230770\n",
+ "Epoch: 3165 avg_cer= 0.019230770\n",
+ "Epoch: 3166 avg_cer= 0.019230770\n",
+ "Epoch: 3167 avg_cer= 0.019230770\n",
+ "Epoch: 3168 avg_cer= 0.019230770\n",
+ "Epoch: 3169 avg_cer= 0.019230770\n",
+ "Epoch: 3170 avg_cer= 0.019230770\n",
+ "Epoch: 3171 avg_cer= 0.019230770\n",
+ "Epoch: 3172 avg_cer= 0.019230770\n",
+ "Epoch: 3173 avg_cer= 0.019230770\n",
+ "Epoch: 3174 avg_cer= 0.019230770\n",
+ "Epoch: 3175 avg_cer= 0.019230770\n",
+ "Epoch: 3176 avg_cer= 0.019230770\n",
+ "Epoch: 3177 avg_cer= 0.019230770\n",
+ "Epoch: 3178 avg_cer= 0.019230770\n",
+ "Epoch: 3179 avg_cer= 0.019230770\n",
+ "Epoch: 3180 avg_cer= 0.019230770\n",
+ "Epoch: 3181 avg_cer= 0.019230770\n",
+ "Epoch: 3182 avg_cer= 0.019230770\n",
+ "Epoch: 3183 avg_cer= 0.019230770\n",
+ "Epoch: 3184 avg_cer= 0.019230770\n",
+ "Epoch: 3185 avg_cer= 0.019230770\n",
+ "Epoch: 3186 avg_cer= 0.019230770\n",
+ "Epoch: 3187 avg_cer= 0.019230770\n",
+ "Epoch: 3188 avg_cer= 0.019230770\n",
+ "Epoch: 3189 avg_cer= 0.019230770\n",
+ "Epoch: 3190 avg_cer= 0.019230770\n",
+ "Epoch: 3191 avg_cer= 0.019230770\n",
+ "Epoch: 3192 avg_cer= 0.019230770\n",
+ "Epoch: 3193 avg_cer= 0.019230770\n",
+ "Epoch: 3194 avg_cer= 0.019230770\n",
+ "Epoch: 3195 avg_cer= 0.019230770\n",
+ "Epoch: 3196 avg_cer= 0.019230770\n",
+ "Epoch: 3197 avg_cer= 0.019230770\n",
+ "Epoch: 3198 avg_cer= 0.019230770\n",
+ "Epoch: 3199 avg_cer= 0.019230770\n",
+ "Epoch: 3200 avg_cer= 0.019230770\n",
+ "Epoch: 3201 avg_cer= 0.019230770\n",
+ "Epoch: 3202 avg_cer= 0.019230770\n",
+ "Epoch: 3203 avg_cer= 0.019230770\n",
+ "Epoch: 3204 avg_cer= 0.019230770\n",
+ "Epoch: 3205 avg_cer= 0.019230770\n",
+ "Epoch: 3206 avg_cer= 0.019230770\n",
+ "Epoch: 3207 avg_cer= 0.019230770\n",
+ "Epoch: 3208 avg_cer= 0.019230770\n",
+ "Epoch: 3209 avg_cer= 0.019230770\n",
+ "Epoch: 3210 avg_cer= 0.019230770\n",
+ "Epoch: 3211 avg_cer= 0.019230770\n",
+ "Epoch: 3212 avg_cer= 0.019230770\n",
+ "Epoch: 3213 avg_cer= 0.019230770\n",
+ "Epoch: 3214 avg_cer= 0.019230770\n",
+ "Epoch: 3215 avg_cer= 0.019230770\n",
+ "Epoch: 3216 avg_cer= 0.019230770\n",
+ "Epoch: 3217 avg_cer= 0.019230770\n",
+ "Epoch: 3218 avg_cer= 0.019230770\n",
+ "Epoch: 3219 avg_cer= 0.019230770\n",
+ "Epoch: 3220 avg_cer= 0.019230770\n",
+ "Epoch: 3221 avg_cer= 0.019230770\n",
+ "Epoch: 3222 avg_cer= 0.019230770\n",
+ "Epoch: 3223 avg_cer= 0.019230770\n",
+ "Epoch: 3224 avg_cer= 0.019230770\n",
+ "Epoch: 3225 avg_cer= 0.019230770\n",
+ "Epoch: 3226 avg_cer= 0.019230770\n",
+ "Epoch: 3227 avg_cer= 0.019230770\n",
+ "Epoch: 3228 avg_cer= 0.019230770\n",
+ "Epoch: 3229 avg_cer= 0.019230770\n",
+ "Epoch: 3230 avg_cer= 0.019230770\n",
+ "Epoch: 3231 avg_cer= 0.019230770\n",
+ "Epoch: 3232 avg_cer= 0.019230770\n",
+ "Epoch: 3233 avg_cer= 0.019230770\n",
+ "Epoch: 3234 avg_cer= 0.019230770\n",
+ "Epoch: 3235 avg_cer= 0.019230770\n",
+ "Epoch: 3236 avg_cer= 0.019230770\n",
+ "Epoch: 3237 avg_cer= 0.019230770\n",
+ "Epoch: 3238 avg_cer= 0.019230770\n",
+ "Epoch: 3239 avg_cer= 0.019230770\n",
+ "Epoch: 3240 avg_cer= 0.019230770\n",
+ "Epoch: 3241 avg_cer= 0.019230770\n",
+ "Epoch: 3242 avg_cer= 0.019230770\n",
+ "Epoch: 3243 avg_cer= 0.019230770\n",
+ "Epoch: 3244 avg_cer= 0.019230770\n",
+ "Epoch: 3245 avg_cer= 0.019230770\n",
+ "Epoch: 3246 avg_cer= 0.019230770\n",
+ "Epoch: 3247 avg_cer= 0.019230770\n",
+ "Epoch: 3248 avg_cer= 0.019230770\n",
+ "Epoch: 3249 avg_cer= 0.019230770\n",
+ "Epoch: 3250 avg_cer= 0.019230770\n",
+ "Epoch: 3251 avg_cer= 0.019230770\n",
+ "Epoch: 3252 avg_cer= 0.019230770\n",
+ "Epoch: 3253 avg_cer= 0.019230770\n",
+ "Epoch: 3254 avg_cer= 0.019230770\n",
+ "Epoch: 3255 avg_cer= 0.019230770\n",
+ "Epoch: 3256 avg_cer= 0.019230770\n",
+ "Epoch: 3257 avg_cer= 0.019230770\n",
+ "Epoch: 3258 avg_cer= 0.019230770\n",
+ "Epoch: 3259 avg_cer= 0.019230770\n",
+ "Epoch: 3260 avg_cer= 0.019230770\n",
+ "Epoch: 3261 avg_cer= 0.019230770\n",
+ "Epoch: 3262 avg_cer= 0.019230770\n",
+ "Epoch: 3263 avg_cer= 0.019230770\n",
+ "Epoch: 3264 avg_cer= 0.019230770\n",
+ "Epoch: 3265 avg_cer= 0.019230770\n",
+ "Epoch: 3266 avg_cer= 0.019230770\n",
+ "Epoch: 3267 avg_cer= 0.019230770\n",
+ "Epoch: 3268 avg_cer= 0.019230770\n",
+ "Epoch: 3269 avg_cer= 0.019230770\n",
+ "Epoch: 3270 avg_cer= 0.019230770\n",
+ "Epoch: 3271 avg_cer= 0.019230770\n",
+ "Epoch: 3272 avg_cer= 0.019230770\n",
+ "Epoch: 3273 avg_cer= 0.019230770\n",
+ "Epoch: 3274 avg_cer= 0.019230770\n",
+ "Epoch: 3275 avg_cer= 0.019230770\n",
+ "Epoch: 3276 avg_cer= 0.019230770\n",
+ "Epoch: 3277 avg_cer= 0.019230770\n",
+ "Epoch: 3278 avg_cer= 0.019230770\n",
+ "Epoch: 3279 avg_cer= 0.019230770\n",
+ "Epoch: 3280 avg_cer= 0.019230770\n",
+ "Epoch: 3281 avg_cer= 0.019230770\n",
+ "Epoch: 3282 avg_cer= 0.019230770\n",
+ "Epoch: 3283 avg_cer= 0.019230770\n",
+ "Epoch: 3284 avg_cer= 0.019230770\n",
+ "Epoch: 3285 avg_cer= 0.019230770\n",
+ "Epoch: 3286 avg_cer= 0.019230770\n",
+ "Epoch: 3287 avg_cer= 0.019230770\n",
+ "Epoch: 3288 avg_cer= 0.019230770\n",
+ "Epoch: 3289 avg_cer= 0.019230770\n",
+ "Epoch: 3290 avg_cer= 0.019230770\n",
+ "Epoch: 3291 avg_cer= 0.019230770\n",
+ "Epoch: 3292 avg_cer= 0.019230770\n",
+ "Epoch: 3293 avg_cer= 0.019230770\n",
+ "Epoch: 3294 avg_cer= 0.019230770\n",
+ "Epoch: 3295 avg_cer= 0.019230770\n",
+ "Epoch: 3296 avg_cer= 0.019230770\n",
+ "Epoch: 3297 avg_cer= 0.019230770\n",
+ "Epoch: 3298 avg_cer= 0.019230770\n",
+ "Epoch: 3299 avg_cer= 0.019230770\n",
+ "Epoch: 3300 avg_cer= 0.019230770\n",
+ "Epoch: 3301 avg_cer= 0.019230770\n",
+ "Epoch: 3302 avg_cer= 0.019230770\n",
+ "Epoch: 3303 avg_cer= 0.019230770\n",
+ "Epoch: 3304 avg_cer= 0.019230770\n",
+ "Epoch: 3305 avg_cer= 0.019230770\n",
+ "Epoch: 3306 avg_cer= 0.019230770\n",
+ "Epoch: 3307 avg_cer= 0.019230770\n",
+ "Epoch: 3308 avg_cer= 0.019230770\n",
+ "Epoch: 3309 avg_cer= 0.019230770\n",
+ "Epoch: 3310 avg_cer= 0.019230770\n",
+ "Epoch: 3311 avg_cer= 0.019230770\n",
+ "Epoch: 3312 avg_cer= 0.019230770\n",
+ "Epoch: 3313 avg_cer= 0.019230770\n",
+ "Epoch: 3314 avg_cer= 0.019230770\n",
+ "Epoch: 3315 avg_cer= 0.019230770\n",
+ "Epoch: 3316 avg_cer= 0.019230770\n",
+ "Epoch: 3317 avg_cer= 0.019230770\n",
+ "Epoch: 3318 avg_cer= 0.019230770\n",
+ "Epoch: 3319 avg_cer= 0.019230770\n",
+ "Epoch: 3320 avg_cer= 0.019230770\n",
+ "Epoch: 3321 avg_cer= 0.019230770\n",
+ "Epoch: 3322 avg_cer= 0.019230770\n",
+ "Epoch: 3323 avg_cer= 0.019230770\n",
+ "Epoch: 3324 avg_cer= 0.019230770\n",
+ "Epoch: 3325 avg_cer= 0.019230770\n",
+ "Epoch: 3326 avg_cer= 0.019230770\n",
+ "Epoch: 3327 avg_cer= 0.019230770\n",
+ "Epoch: 3328 avg_cer= 0.019230770\n",
+ "Epoch: 3329 avg_cer= 0.019230770\n",
+ "Epoch: 3330 avg_cer= 0.019230770\n",
+ "Epoch: 3331 avg_cer= 0.019230770\n",
+ "Epoch: 3332 avg_cer= 0.019230770\n",
+ "Epoch: 3333 avg_cer= 0.019230770\n",
+ "Epoch: 3334 avg_cer= 0.019230770\n",
+ "Epoch: 3335 avg_cer= 0.019230770\n",
+ "Epoch: 3336 avg_cer= 0.019230770\n",
+ "Epoch: 3337 avg_cer= 0.019230770\n",
+ "Epoch: 3338 avg_cer= 0.019230770\n",
+ "Epoch: 3339 avg_cer= 0.019230770\n",
+ "Epoch: 3340 avg_cer= 0.019230770\n",
+ "Epoch: 3341 avg_cer= 0.019230770\n",
+ "Epoch: 3342 avg_cer= 0.019230770\n",
+ "Epoch: 3343 avg_cer= 0.019230770\n",
+ "Epoch: 3344 avg_cer= 0.019230770\n",
+ "Epoch: 3345 avg_cer= 0.019230770\n",
+ "Epoch: 3346 avg_cer= 0.019230770\n",
+ "Epoch: 3347 avg_cer= 0.019230770\n",
+ "Epoch: 3348 avg_cer= 0.019230770\n",
+ "Epoch: 3349 avg_cer= 0.019230770\n",
+ "Epoch: 3350 avg_cer= 0.019230770\n",
+ "Epoch: 3351 avg_cer= 0.019230770\n",
+ "Epoch: 3352 avg_cer= 0.019230770\n",
+ "Epoch: 3353 avg_cer= 0.019230770\n",
+ "Epoch: 3354 avg_cer= 0.019230770\n",
+ "Epoch: 3355 avg_cer= 0.019230770\n",
+ "Epoch: 3356 avg_cer= 0.019230770\n",
+ "Epoch: 3357 avg_cer= 0.019230770\n",
+ "Epoch: 3358 avg_cer= 0.019230770\n",
+ "Epoch: 3359 avg_cer= 0.019230770\n",
+ "Epoch: 3360 avg_cer= 0.019230770\n",
+ "Epoch: 3361 avg_cer= 0.019230770\n",
+ "Epoch: 3362 avg_cer= 0.019230770\n",
+ "Epoch: 3363 avg_cer= 0.019230770\n",
+ "Epoch: 3364 avg_cer= 0.019230770\n",
+ "Epoch: 3365 avg_cer= 0.019230770\n",
+ "Epoch: 3366 avg_cer= 0.019230770\n",
+ "Epoch: 3367 avg_cer= 0.019230770\n",
+ "Epoch: 3368 avg_cer= 0.019230770\n",
+ "Epoch: 3369 avg_cer= 0.019230770\n",
+ "Epoch: 3370 avg_cer= 0.019230770\n",
+ "Epoch: 3371 avg_cer= 0.019230770\n",
+ "Epoch: 3372 avg_cer= 0.019230770\n",
+ "Epoch: 3373 avg_cer= 0.019230770\n",
+ "Epoch: 3374 avg_cer= 0.019230770\n",
+ "Epoch: 3375 avg_cer= 0.019230770\n",
+ "Epoch: 3376 avg_cer= 0.019230770\n",
+ "Epoch: 3377 avg_cer= 0.019230770\n",
+ "Epoch: 3378 avg_cer= 0.019230770\n",
+ "Epoch: 3379 avg_cer= 0.019230770\n",
+ "Epoch: 3380 avg_cer= 0.019230770\n",
+ "Epoch: 3381 avg_cer= 0.019230770\n",
+ "Epoch: 3382 avg_cer= 0.019230770\n",
+ "Epoch: 3383 avg_cer= 0.019230770\n",
+ "Epoch: 3384 avg_cer= 0.019230770\n",
+ "Epoch: 3385 avg_cer= 0.019230770\n",
+ "Epoch: 3386 avg_cer= 0.019230770\n",
+ "Epoch: 3387 avg_cer= 0.019230770\n",
+ "Epoch: 3388 avg_cer= 0.019230770\n",
+ "Epoch: 3389 avg_cer= 0.019230770\n",
+ "Epoch: 3390 avg_cer= 0.019230770\n",
+ "Epoch: 3391 avg_cer= 0.019230770\n",
+ "Epoch: 3392 avg_cer= 0.019230770\n",
+ "Epoch: 3393 avg_cer= 0.019230770\n",
+ "Epoch: 3394 avg_cer= 0.019230770\n",
+ "Epoch: 3395 avg_cer= 0.019230770\n",
+ "Epoch: 3396 avg_cer= 0.019230770\n",
+ "Epoch: 3397 avg_cer= 0.019230770\n",
+ "Epoch: 3398 avg_cer= 0.019230770\n",
+ "Epoch: 3399 avg_cer= 0.019230770\n",
+ "Epoch: 3400 avg_cer= 0.019230770\n",
+ "Epoch: 3401 avg_cer= 0.019230770\n",
+ "Epoch: 3402 avg_cer= 0.019230770\n",
+ "Epoch: 3403 avg_cer= 0.019230770\n",
+ "Epoch: 3404 avg_cer= 0.019230770\n",
+ "Epoch: 3405 avg_cer= 0.019230770\n",
+ "Epoch: 3406 avg_cer= 0.019230770\n",
+ "Epoch: 3407 avg_cer= 0.019230770\n",
+ "Epoch: 3408 avg_cer= 0.019230770\n",
+ "Epoch: 3409 avg_cer= 0.019230770\n",
+ "Epoch: 3410 avg_cer= 0.019230770\n",
+ "Epoch: 3411 avg_cer= 0.019230770\n",
+ "Epoch: 3412 avg_cer= 0.019230770\n",
+ "Epoch: 3413 avg_cer= 0.019230770\n",
+ "Epoch: 3414 avg_cer= 0.019230770\n",
+ "Epoch: 3415 avg_cer= 0.019230770\n",
+ "Epoch: 3416 avg_cer= 0.019230770\n",
+ "Epoch: 3417 avg_cer= 0.019230770\n",
+ "Epoch: 3418 avg_cer= 0.019230770\n",
+ "Epoch: 3419 avg_cer= 0.019230770\n",
+ "Epoch: 3420 avg_cer= 0.019230770\n",
+ "Epoch: 3421 avg_cer= 0.019230770\n",
+ "Epoch: 3422 avg_cer= 0.019230770\n",
+ "Epoch: 3423 avg_cer= 0.019230770\n",
+ "Epoch: 3424 avg_cer= 0.019230770\n",
+ "Epoch: 3425 avg_cer= 0.019230770\n",
+ "Epoch: 3426 avg_cer= 0.019230770\n",
+ "Epoch: 3427 avg_cer= 0.019230770\n",
+ "Epoch: 3428 avg_cer= 0.019230770\n",
+ "Epoch: 3429 avg_cer= 0.019230770\n",
+ "Epoch: 3430 avg_cer= 0.019230770\n",
+ "Epoch: 3431 avg_cer= 0.019230770\n",
+ "Epoch: 3432 avg_cer= 0.019230770\n",
+ "Epoch: 3433 avg_cer= 0.019230770\n",
+ "Epoch: 3434 avg_cer= 0.019230770\n",
+ "Epoch: 3435 avg_cer= 0.019230770\n",
+ "Epoch: 3436 avg_cer= 0.019230770\n",
+ "Epoch: 3437 avg_cer= 0.019230770\n",
+ "Epoch: 3438 avg_cer= 0.019230770\n",
+ "Epoch: 3439 avg_cer= 0.019230770\n",
+ "Epoch: 3440 avg_cer= 0.019230770\n",
+ "Epoch: 3441 avg_cer= 0.019230770\n",
+ "Epoch: 3442 avg_cer= 0.019230770\n",
+ "Epoch: 3443 avg_cer= 0.019230770\n",
+ "Epoch: 3444 avg_cer= 0.019230770\n",
+ "Epoch: 3445 avg_cer= 0.019230770\n",
+ "Epoch: 3446 avg_cer= 0.019230770\n",
+ "Epoch: 3447 avg_cer= 0.019230770\n",
+ "Epoch: 3448 avg_cer= 0.019230770\n",
+ "Epoch: 3449 avg_cer= 0.019230770\n",
+ "Epoch: 3450 avg_cer= 0.019230770\n",
+ "Epoch: 3451 avg_cer= 0.019230770\n",
+ "Epoch: 3452 avg_cer= 0.019230770\n",
+ "Epoch: 3453 avg_cer= 0.019230770\n",
+ "Epoch: 3454 avg_cer= 0.019230770\n",
+ "Epoch: 3455 avg_cer= 0.019230770\n",
+ "Epoch: 3456 avg_cer= 0.019230770\n",
+ "Epoch: 3457 avg_cer= 0.019230770\n",
+ "Epoch: 3458 avg_cer= 0.019230770\n",
+ "Epoch: 3459 avg_cer= 0.019230770\n",
+ "Epoch: 3460 avg_cer= 0.019230770\n",
+ "Epoch: 3461 avg_cer= 0.019230770\n",
+ "Epoch: 3462 avg_cer= 0.019230770\n",
+ "Epoch: 3463 avg_cer= 0.019230770\n",
+ "Epoch: 3464 avg_cer= 0.019230770\n",
+ "Epoch: 3465 avg_cer= 0.019230770\n",
+ "Epoch: 3466 avg_cer= 0.019230770\n",
+ "Epoch: 3467 avg_cer= 0.019230770\n",
+ "Epoch: 3468 avg_cer= 0.019230770\n",
+ "Epoch: 3469 avg_cer= 0.019230770\n",
+ "Epoch: 3470 avg_cer= 0.019230770\n",
+ "Epoch: 3471 avg_cer= 0.019230770\n",
+ "Epoch: 3472 avg_cer= 0.019230770\n",
+ "Epoch: 3473 avg_cer= 0.019230770\n",
+ "Epoch: 3474 avg_cer= 0.019230770\n",
+ "Epoch: 3475 avg_cer= 0.019230770\n",
+ "Epoch: 3476 avg_cer= 0.019230770\n",
+ "Epoch: 3477 avg_cer= 0.019230770\n",
+ "Epoch: 3478 avg_cer= 0.019230770\n",
+ "Epoch: 3479 avg_cer= 0.019230770\n",
+ "Epoch: 3480 avg_cer= 0.019230770\n",
+ "Epoch: 3481 avg_cer= 0.019230770\n",
+ "Epoch: 3482 avg_cer= 0.019230770\n",
+ "Epoch: 3483 avg_cer= 0.019230770\n",
+ "Epoch: 3484 avg_cer= 0.019230770\n",
+ "Epoch: 3485 avg_cer= 0.019230770\n",
+ "Epoch: 3486 avg_cer= 0.019230770\n",
+ "Epoch: 3487 avg_cer= 0.019230770\n",
+ "Epoch: 3488 avg_cer= 0.019230770\n",
+ "Epoch: 3489 avg_cer= 0.019230770\n",
+ "Epoch: 3490 avg_cer= 0.019230770\n",
+ "Epoch: 3491 avg_cer= 0.019230770\n",
+ "Epoch: 3492 avg_cer= 0.019230770\n",
+ "Epoch: 3493 avg_cer= 0.019230770\n",
+ "Epoch: 3494 avg_cer= 0.019230770\n",
+ "Epoch: 3495 avg_cer= 0.019230770\n",
+ "Epoch: 3496 avg_cer= 0.019230770\n",
+ "Epoch: 3497 avg_cer= 0.019230770\n",
+ "Epoch: 3498 avg_cer= 0.019230770\n",
+ "Epoch: 3499 avg_cer= 0.019230770\n",
+ "Epoch: 3500 avg_cer= 0.019230770\n",
+ "Epoch: 3501 avg_cer= 0.019230770\n",
+ "Epoch: 3502 avg_cer= 0.019230770\n",
+ "Epoch: 3503 avg_cer= 0.019230770\n",
+ "Epoch: 3504 avg_cer= 0.019230770\n",
+ "Epoch: 3505 avg_cer= 0.019230770\n",
+ "Epoch: 3506 avg_cer= 0.019230770\n",
+ "Epoch: 3507 avg_cer= 0.019230770\n",
+ "Epoch: 3508 avg_cer= 0.019230770\n",
+ "Epoch: 3509 avg_cer= 0.019230770\n",
+ "Epoch: 3510 avg_cer= 0.019230770\n",
+ "Epoch: 3511 avg_cer= 0.019230770\n",
+ "Epoch: 3512 avg_cer= 0.019230770\n",
+ "Epoch: 3513 avg_cer= 0.019230770\n",
+ "Epoch: 3514 avg_cer= 0.019230770\n",
+ "Epoch: 3515 avg_cer= 0.019230770\n",
+ "Epoch: 3516 avg_cer= 0.019230770\n",
+ "Epoch: 3517 avg_cer= 0.019230770\n",
+ "Epoch: 3518 avg_cer= 0.019230770\n",
+ "Epoch: 3519 avg_cer= 0.019230770\n",
+ "Epoch: 3520 avg_cer= 0.019230770\n",
+ "Epoch: 3521 avg_cer= 0.019230770\n",
+ "Epoch: 3522 avg_cer= 0.019230770\n",
+ "Epoch: 3523 avg_cer= 0.019230770\n",
+ "Epoch: 3524 avg_cer= 0.019230770\n",
+ "Epoch: 3525 avg_cer= 0.019230770\n",
+ "Epoch: 3526 avg_cer= 0.019230770\n",
+ "Epoch: 3527 avg_cer= 0.019230770\n",
+ "Epoch: 3528 avg_cer= 0.019230770\n",
+ "Epoch: 3529 avg_cer= 0.019230770\n",
+ "Epoch: 3530 avg_cer= 0.019230770\n",
+ "Epoch: 3531 avg_cer= 0.019230770\n",
+ "Epoch: 3532 avg_cer= 0.019230770\n",
+ "Epoch: 3533 avg_cer= 0.019230770\n",
+ "Epoch: 3534 avg_cer= 0.019230770\n",
+ "Epoch: 3535 avg_cer= 0.019230770\n",
+ "Epoch: 3536 avg_cer= 0.019230770\n",
+ "Epoch: 3537 avg_cer= 0.019230770\n",
+ "Epoch: 3538 avg_cer= 0.019230770\n",
+ "Epoch: 3539 avg_cer= 0.019230770\n",
+ "Epoch: 3540 avg_cer= 0.019230770\n",
+ "Epoch: 3541 avg_cer= 0.019230770\n",
+ "Epoch: 3542 avg_cer= 0.019230770\n",
+ "Epoch: 3543 avg_cer= 0.019230770\n",
+ "Epoch: 3544 avg_cer= 0.019230770\n",
+ "Epoch: 3545 avg_cer= 0.019230770\n",
+ "Epoch: 3546 avg_cer= 0.019230770\n",
+ "Epoch: 3547 avg_cer= 0.019230770\n",
+ "Epoch: 3548 avg_cer= 0.019230770\n",
+ "Epoch: 3549 avg_cer= 0.019230770\n",
+ "Epoch: 3550 avg_cer= 0.019230770\n",
+ "Epoch: 3551 avg_cer= 0.019230770\n",
+ "Epoch: 3552 avg_cer= 0.019230770\n",
+ "Epoch: 3553 avg_cer= 0.019230770\n",
+ "Epoch: 3554 avg_cer= 0.019230770\n",
+ "Epoch: 3555 avg_cer= 0.019230770\n",
+ "Epoch: 3556 avg_cer= 0.019230770\n",
+ "Epoch: 3557 avg_cer= 0.019230770\n",
+ "Epoch: 3558 avg_cer= 0.019230770\n",
+ "Epoch: 3559 avg_cer= 0.019230770\n",
+ "Epoch: 3560 avg_cer= 0.019230770\n",
+ "Epoch: 3561 avg_cer= 0.019230770\n",
+ "Epoch: 3562 avg_cer= 0.019230770\n",
+ "Epoch: 3563 avg_cer= 0.019230770\n",
+ "Epoch: 3564 avg_cer= 0.019230770\n",
+ "Epoch: 3565 avg_cer= 0.019230770\n",
+ "Epoch: 3566 avg_cer= 0.019230770\n",
+ "Epoch: 3567 avg_cer= 0.019230770\n",
+ "Epoch: 3568 avg_cer= 0.019230770\n",
+ "Epoch: 3569 avg_cer= 0.019230770\n",
+ "Epoch: 3570 avg_cer= 0.019230770\n",
+ "Epoch: 3571 avg_cer= 0.019230770\n",
+ "Epoch: 3572 avg_cer= 0.019230770\n",
+ "Epoch: 3573 avg_cer= 0.019230770\n",
+ "Epoch: 3574 avg_cer= 0.019230770\n",
+ "Epoch: 3575 avg_cer= 0.019230770\n",
+ "Epoch: 3576 avg_cer= 0.019230770\n",
+ "Epoch: 3577 avg_cer= 0.019230770\n",
+ "Epoch: 3578 avg_cer= 0.019230770\n",
+ "Epoch: 3579 avg_cer= 0.019230770\n",
+ "Epoch: 3580 avg_cer= 0.019230770\n",
+ "Epoch: 3581 avg_cer= 0.019230770\n",
+ "Epoch: 3582 avg_cer= 0.019230770\n",
+ "Epoch: 3583 avg_cer= 0.019230770\n",
+ "Epoch: 3584 avg_cer= 0.019230770\n",
+ "Epoch: 3585 avg_cer= 0.019230770\n",
+ "Epoch: 3586 avg_cer= 0.019230770\n",
+ "Epoch: 3587 avg_cer= 0.019230770\n",
+ "Epoch: 3588 avg_cer= 0.019230770\n",
+ "Epoch: 3589 avg_cer= 0.019230770\n",
+ "Epoch: 3590 avg_cer= 0.019230770\n",
+ "Epoch: 3591 avg_cer= 0.019230770\n",
+ "Epoch: 3592 avg_cer= 0.019230770\n",
+ "Epoch: 3593 avg_cer= 0.019230770\n",
+ "Epoch: 3594 avg_cer= 0.019230770\n",
+ "Epoch: 3595 avg_cer= 0.019230770\n",
+ "Epoch: 3596 avg_cer= 0.019230770\n",
+ "Epoch: 3597 avg_cer= 0.019230770\n",
+ "Epoch: 3598 avg_cer= 0.019230770\n",
+ "Epoch: 3599 avg_cer= 0.019230770\n",
+ "Epoch: 3600 avg_cer= 0.019230770\n",
+ "Epoch: 3601 avg_cer= 0.019230770\n",
+ "Epoch: 3602 avg_cer= 0.019230770\n",
+ "Epoch: 3603 avg_cer= 0.019230770\n",
+ "Epoch: 3604 avg_cer= 0.019230770\n",
+ "Epoch: 3605 avg_cer= 0.019230770\n",
+ "Epoch: 3606 avg_cer= 0.019230770\n",
+ "Epoch: 3607 avg_cer= 0.019230770\n",
+ "Epoch: 3608 avg_cer= 0.019230770\n",
+ "Epoch: 3609 avg_cer= 0.019230770\n",
+ "Epoch: 3610 avg_cer= 0.019230770\n",
+ "Epoch: 3611 avg_cer= 0.019230770\n",
+ "Epoch: 3612 avg_cer= 0.019230770\n",
+ "Epoch: 3613 avg_cer= 0.019230770\n",
+ "Epoch: 3614 avg_cer= 0.019230770\n",
+ "Epoch: 3615 avg_cer= 0.019230770\n",
+ "Epoch: 3616 avg_cer= 0.019230770\n",
+ "Epoch: 3617 avg_cer= 0.019230770\n",
+ "Epoch: 3618 avg_cer= 0.019230770\n",
+ "Epoch: 3619 avg_cer= 0.019230770\n",
+ "Epoch: 3620 avg_cer= 0.019230770\n",
+ "Epoch: 3621 avg_cer= 0.019230770\n",
+ "Epoch: 3622 avg_cer= 0.019230770\n",
+ "Epoch: 3623 avg_cer= 0.019230770\n",
+ "Epoch: 3624 avg_cer= 0.019230770\n",
+ "Epoch: 3625 avg_cer= 0.019230770\n",
+ "Epoch: 3626 avg_cer= 0.019230770\n",
+ "Epoch: 3627 avg_cer= 0.019230770\n",
+ "Epoch: 3628 avg_cer= 0.019230770\n",
+ "Epoch: 3629 avg_cer= 0.019230770\n",
+ "Epoch: 3630 avg_cer= 0.019230770\n",
+ "Epoch: 3631 avg_cer= 0.019230770\n",
+ "Epoch: 3632 avg_cer= 0.019230770\n",
+ "Epoch: 3633 avg_cer= 0.019230770\n",
+ "Epoch: 3634 avg_cer= 0.019230770\n",
+ "Epoch: 3635 avg_cer= 0.019230770\n",
+ "Epoch: 3636 avg_cer= 0.019230770\n",
+ "Epoch: 3637 avg_cer= 0.019230770\n",
+ "Epoch: 3638 avg_cer= 0.019230770\n",
+ "Epoch: 3639 avg_cer= 0.019230770\n",
+ "Epoch: 3640 avg_cer= 0.019230770\n",
+ "Epoch: 3641 avg_cer= 0.019230770\n",
+ "Epoch: 3642 avg_cer= 0.019230770\n",
+ "Epoch: 3643 avg_cer= 0.019230770\n",
+ "Epoch: 3644 avg_cer= 0.019230770\n",
+ "Epoch: 3645 avg_cer= 0.019230770\n",
+ "Epoch: 3646 avg_cer= 0.019230770\n",
+ "Epoch: 3647 avg_cer= 0.019230770\n",
+ "Epoch: 3648 avg_cer= 0.019230770\n",
+ "Epoch: 3649 avg_cer= 0.019230770\n",
+ "Epoch: 3650 avg_cer= 0.019230770\n",
+ "Epoch: 3651 avg_cer= 0.019230770\n",
+ "Epoch: 3652 avg_cer= 0.019230770\n",
+ "Epoch: 3653 avg_cer= 0.019230770\n",
+ "Epoch: 3654 avg_cer= 0.019230770\n",
+ "Epoch: 3655 avg_cer= 0.019230770\n",
+ "Epoch: 3656 avg_cer= 0.019230770\n",
+ "Epoch: 3657 avg_cer= 0.019230770\n",
+ "Epoch: 3658 avg_cer= 0.019230770\n",
+ "Epoch: 3659 avg_cer= 0.019230770\n",
+ "Epoch: 3660 avg_cer= 0.019230770\n",
+ "Epoch: 3661 avg_cer= 0.019230770\n",
+ "Epoch: 3662 avg_cer= 0.019230770\n",
+ "Epoch: 3663 avg_cer= 0.019230770\n",
+ "Epoch: 3664 avg_cer= 0.019230770\n",
+ "Epoch: 3665 avg_cer= 0.019230770\n",
+ "Epoch: 3666 avg_cer= 0.019230770\n",
+ "Epoch: 3667 avg_cer= 0.019230770\n",
+ "Epoch: 3668 avg_cer= 0.019230770\n",
+ "Epoch: 3669 avg_cer= 0.019230770\n",
+ "Epoch: 3670 avg_cer= 0.019230770\n",
+ "Epoch: 3671 avg_cer= 0.019230770\n",
+ "Epoch: 3672 avg_cer= 0.019230770\n",
+ "Epoch: 3673 avg_cer= 0.019230770\n",
+ "Epoch: 3674 avg_cer= 0.019230770\n",
+ "Epoch: 3675 avg_cer= 0.019230770\n",
+ "Epoch: 3676 avg_cer= 0.019230770\n",
+ "Epoch: 3677 avg_cer= 0.019230770\n",
+ "Epoch: 3678 avg_cer= 0.019230770\n",
+ "Epoch: 3679 avg_cer= 0.019230770\n",
+ "Epoch: 3680 avg_cer= 0.019230770\n",
+ "Epoch: 3681 avg_cer= 0.019230770\n",
+ "Epoch: 3682 avg_cer= 0.019230770\n",
+ "Epoch: 3683 avg_cer= 0.019230770\n",
+ "Epoch: 3684 avg_cer= 0.019230770\n",
+ "Epoch: 3685 avg_cer= 0.019230770\n",
+ "Epoch: 3686 avg_cer= 0.019230770\n",
+ "Epoch: 3687 avg_cer= 0.019230770\n",
+ "Epoch: 3688 avg_cer= 0.019230770\n",
+ "Epoch: 3689 avg_cer= 0.019230770\n",
+ "Epoch: 3690 avg_cer= 0.019230770\n",
+ "Epoch: 3691 avg_cer= 0.019230770\n",
+ "Epoch: 3692 avg_cer= 0.019230770\n",
+ "Epoch: 3693 avg_cer= 0.019230770\n",
+ "Epoch: 3694 avg_cer= 0.019230770\n",
+ "Epoch: 3695 avg_cer= 0.019230770\n",
+ "Epoch: 3696 avg_cer= 0.019230770\n",
+ "Epoch: 3697 avg_cer= 0.019230770\n",
+ "Epoch: 3698 avg_cer= 0.019230770\n",
+ "Epoch: 3699 avg_cer= 0.019230770\n",
+ "Epoch: 3700 avg_cer= 0.019230770\n",
+ "Epoch: 3701 avg_cer= 0.019230770\n",
+ "Epoch: 3702 avg_cer= 0.019230770\n",
+ "Epoch: 3703 avg_cer= 0.019230770\n",
+ "Epoch: 3704 avg_cer= 0.019230770\n",
+ "Epoch: 3705 avg_cer= 0.019230770\n",
+ "Epoch: 3706 avg_cer= 0.019230770\n",
+ "Epoch: 3707 avg_cer= 0.019230770\n",
+ "Epoch: 3708 avg_cer= 0.019230770\n",
+ "Epoch: 3709 avg_cer= 0.019230770\n",
+ "Epoch: 3710 avg_cer= 0.019230770\n",
+ "Epoch: 3711 avg_cer= 0.019230770\n",
+ "Epoch: 3712 avg_cer= 0.019230770\n",
+ "Epoch: 3713 avg_cer= 0.019230770\n",
+ "Epoch: 3714 avg_cer= 0.019230770\n",
+ "Epoch: 3715 avg_cer= 0.019230770\n",
+ "Epoch: 3716 avg_cer= 0.019230770\n",
+ "Epoch: 3717 avg_cer= 0.019230770\n",
+ "Epoch: 3718 avg_cer= 0.019230770\n",
+ "Epoch: 3719 avg_cer= 0.019230770\n",
+ "Epoch: 3720 avg_cer= 0.019230770\n",
+ "Epoch: 3721 avg_cer= 0.019230770\n",
+ "Epoch: 3722 avg_cer= 0.019230770\n",
+ "Epoch: 3723 avg_cer= 0.019230770\n",
+ "Epoch: 3724 avg_cer= 0.019230770\n",
+ "Epoch: 3725 avg_cer= 0.019230770\n",
+ "Epoch: 3726 avg_cer= 0.019230770\n",
+ "Epoch: 3727 avg_cer= 0.019230770\n",
+ "Epoch: 3728 avg_cer= 0.019230770\n",
+ "Epoch: 3729 avg_cer= 0.019230770\n",
+ "Epoch: 3730 avg_cer= 0.019230770\n",
+ "Epoch: 3731 avg_cer= 0.019230770\n",
+ "Epoch: 3732 avg_cer= 0.019230770\n",
+ "Epoch: 3733 avg_cer= 0.019230770\n",
+ "Epoch: 3734 avg_cer= 0.019230770\n",
+ "Epoch: 3735 avg_cer= 0.019230770\n",
+ "Epoch: 3736 avg_cer= 0.019230770\n",
+ "Epoch: 3737 avg_cer= 0.019230770\n",
+ "Epoch: 3738 avg_cer= 0.019230770\n",
+ "Epoch: 3739 avg_cer= 0.019230770\n",
+ "Epoch: 3740 avg_cer= 0.019230770\n",
+ "Epoch: 3741 avg_cer= 0.019230770\n",
+ "Epoch: 3742 avg_cer= 0.019230770\n",
+ "Epoch: 3743 avg_cer= 0.019230770\n",
+ "Epoch: 3744 avg_cer= 0.019230770\n",
+ "Epoch: 3745 avg_cer= 0.019230770\n",
+ "Epoch: 3746 avg_cer= 0.019230770\n",
+ "Epoch: 3747 avg_cer= 0.019230770\n",
+ "Epoch: 3748 avg_cer= 0.019230770\n",
+ "Epoch: 3749 avg_cer= 0.019230770\n",
+ "Epoch: 3750 avg_cer= 0.019230770\n",
+ "Epoch: 3751 avg_cer= 0.019230770\n",
+ "Epoch: 3752 avg_cer= 0.019230770\n",
+ "Epoch: 3753 avg_cer= 0.019230770\n",
+ "Epoch: 3754 avg_cer= 0.019230770\n",
+ "Epoch: 3755 avg_cer= 0.019230770\n",
+ "Epoch: 3756 avg_cer= 0.019230770\n",
+ "Epoch: 3757 avg_cer= 0.019230770\n",
+ "Epoch: 3758 avg_cer= 0.019230770\n",
+ "Epoch: 3759 avg_cer= 0.019230770\n",
+ "Epoch: 3760 avg_cer= 0.019230770\n",
+ "Epoch: 3761 avg_cer= 0.019230770\n",
+ "Epoch: 3762 avg_cer= 0.019230770\n",
+ "Epoch: 3763 avg_cer= 0.019230770\n",
+ "Epoch: 3764 avg_cer= 0.019230770\n",
+ "Epoch: 3765 avg_cer= 0.019230770\n",
+ "Epoch: 3766 avg_cer= 0.019230770\n",
+ "Epoch: 3767 avg_cer= 0.019230770\n",
+ "Epoch: 3768 avg_cer= 0.019230770\n",
+ "Epoch: 3769 avg_cer= 0.019230770\n",
+ "Epoch: 3770 avg_cer= 0.019230770\n",
+ "Epoch: 3771 avg_cer= 0.019230770\n",
+ "Epoch: 3772 avg_cer= 0.019230770\n",
+ "Epoch: 3773 avg_cer= 0.019230770\n",
+ "Epoch: 3774 avg_cer= 0.019230770\n",
+ "Epoch: 3775 avg_cer= 0.019230770\n",
+ "Epoch: 3776 avg_cer= 0.019230770\n",
+ "Epoch: 3777 avg_cer= 0.019230770\n",
+ "Epoch: 3778 avg_cer= 0.019230770\n",
+ "Epoch: 3779 avg_cer= 0.019230770\n",
+ "Epoch: 3780 avg_cer= 0.019230770\n",
+ "Epoch: 3781 avg_cer= 0.019230770\n",
+ "Epoch: 3782 avg_cer= 0.019230770\n",
+ "Epoch: 3783 avg_cer= 0.019230770\n",
+ "Epoch: 3784 avg_cer= 0.019230770\n",
+ "Epoch: 3785 avg_cer= 0.019230770\n",
+ "Epoch: 3786 avg_cer= 0.019230770\n",
+ "Epoch: 3787 avg_cer= 0.019230770\n",
+ "Epoch: 3788 avg_cer= 0.019230770\n",
+ "Epoch: 3789 avg_cer= 0.019230770\n",
+ "Epoch: 3790 avg_cer= 0.019230770\n",
+ "Epoch: 3791 avg_cer= 0.019230770\n",
+ "Epoch: 3792 avg_cer= 0.019230770\n",
+ "Epoch: 3793 avg_cer= 0.019230770\n",
+ "Epoch: 3794 avg_cer= 0.019230770\n",
+ "Epoch: 3795 avg_cer= 0.019230770\n",
+ "Epoch: 3796 avg_cer= 0.019230770\n",
+ "Epoch: 3797 avg_cer= 0.019230770\n",
+ "Epoch: 3798 avg_cer= 0.019230770\n",
+ "Epoch: 3799 avg_cer= 0.019230770\n",
+ "Epoch: 3800 avg_cer= 0.019230770\n",
+ "Epoch: 3801 avg_cer= 0.019230770\n",
+ "Epoch: 3802 avg_cer= 0.019230770\n",
+ "Epoch: 3803 avg_cer= 0.019230770\n",
+ "Epoch: 3804 avg_cer= 0.019230770\n",
+ "Epoch: 3805 avg_cer= 0.019230770\n",
+ "Epoch: 3806 avg_cer= 0.019230770\n",
+ "Epoch: 3807 avg_cer= 0.019230770\n",
+ "Epoch: 3808 avg_cer= 0.019230770\n",
+ "Epoch: 3809 avg_cer= 0.019230770\n",
+ "Epoch: 3810 avg_cer= 0.019230770\n",
+ "Epoch: 3811 avg_cer= 0.019230770\n",
+ "Epoch: 3812 avg_cer= 0.019230770\n",
+ "Epoch: 3813 avg_cer= 0.019230770\n",
+ "Epoch: 3814 avg_cer= 0.019230770\n",
+ "Epoch: 3815 avg_cer= 0.019230770\n",
+ "Epoch: 3816 avg_cer= 0.019230770\n",
+ "Epoch: 3817 avg_cer= 0.019230770\n",
+ "Epoch: 3818 avg_cer= 0.019230770\n",
+ "Epoch: 3819 avg_cer= 0.019230770\n",
+ "Epoch: 3820 avg_cer= 0.019230770\n",
+ "Epoch: 3821 avg_cer= 0.019230770\n",
+ "Epoch: 3822 avg_cer= 0.019230770\n",
+ "Epoch: 3823 avg_cer= 0.019230770\n",
+ "Epoch: 3824 avg_cer= 0.019230770\n",
+ "Epoch: 3825 avg_cer= 0.019230770\n",
+ "Epoch: 3826 avg_cer= 0.019230770\n",
+ "Epoch: 3827 avg_cer= 0.019230770\n",
+ "Epoch: 3828 avg_cer= 0.019230770\n",
+ "Epoch: 3829 avg_cer= 0.019230770\n",
+ "Epoch: 3830 avg_cer= 0.019230770\n",
+ "Epoch: 3831 avg_cer= 0.019230770\n",
+ "Epoch: 3832 avg_cer= 0.019230770\n",
+ "Epoch: 3833 avg_cer= 0.019230770\n",
+ "Epoch: 3834 avg_cer= 0.019230770\n",
+ "Epoch: 3835 avg_cer= 0.019230770\n",
+ "Epoch: 3836 avg_cer= 0.019230770\n",
+ "Epoch: 3837 avg_cer= 0.019230770\n",
+ "Epoch: 3838 avg_cer= 0.019230770\n",
+ "Epoch: 3839 avg_cer= 0.019230770\n",
+ "Epoch: 3840 avg_cer= 0.019230770\n",
+ "Epoch: 3841 avg_cer= 0.019230770\n",
+ "Epoch: 3842 avg_cer= 0.019230770\n",
+ "Epoch: 3843 avg_cer= 0.019230770\n",
+ "Epoch: 3844 avg_cer= 0.019230770\n",
+ "Epoch: 3845 avg_cer= 0.019230770\n",
+ "Epoch: 3846 avg_cer= 0.019230770\n",
+ "Epoch: 3847 avg_cer= 0.019230770\n",
+ "Epoch: 3848 avg_cer= 0.019230770\n",
+ "Epoch: 3849 avg_cer= 0.019230770\n",
+ "Epoch: 3850 avg_cer= 0.019230770\n",
+ "Epoch: 3851 avg_cer= 0.019230770\n",
+ "Epoch: 3852 avg_cer= 0.019230770\n",
+ "Epoch: 3853 avg_cer= 0.019230770\n",
+ "Epoch: 3854 avg_cer= 0.019230770\n",
+ "Epoch: 3855 avg_cer= 0.019230770\n",
+ "Epoch: 3856 avg_cer= 0.019230770\n",
+ "Epoch: 3857 avg_cer= 0.019230770\n",
+ "Epoch: 3858 avg_cer= 0.019230770\n",
+ "Epoch: 3859 avg_cer= 0.019230770\n",
+ "Epoch: 3860 avg_cer= 0.019230770\n",
+ "Epoch: 3861 avg_cer= 0.019230770\n",
+ "Epoch: 3862 avg_cer= 0.019230770\n",
+ "Epoch: 3863 avg_cer= 0.019230770\n",
+ "Epoch: 3864 avg_cer= 0.019230770\n",
+ "Epoch: 3865 avg_cer= 0.019230770\n",
+ "Epoch: 3866 avg_cer= 0.019230770\n",
+ "Epoch: 3867 avg_cer= 0.019230770\n",
+ "Epoch: 3868 avg_cer= 0.019230770\n",
+ "Epoch: 3869 avg_cer= 0.019230770\n",
+ "Epoch: 3870 avg_cer= 0.019230770\n",
+ "Epoch: 3871 avg_cer= 0.019230770\n",
+ "Epoch: 3872 avg_cer= 0.019230770\n",
+ "Epoch: 3873 avg_cer= 0.019230770\n",
+ "Epoch: 3874 avg_cer= 0.019230770\n",
+ "Epoch: 3875 avg_cer= 0.019230770\n",
+ "Epoch: 3876 avg_cer= 0.019230770\n",
+ "Epoch: 3877 avg_cer= 0.019230770\n",
+ "Epoch: 3878 avg_cer= 0.019230770\n",
+ "Epoch: 3879 avg_cer= 0.019230770\n",
+ "Epoch: 3880 avg_cer= 0.019230770\n",
+ "Epoch: 3881 avg_cer= 0.019230770\n",
+ "Epoch: 3882 avg_cer= 0.019230770\n",
+ "Epoch: 3883 avg_cer= 0.019230770\n",
+ "Epoch: 3884 avg_cer= 0.019230770\n",
+ "Epoch: 3885 avg_cer= 0.019230770\n",
+ "Epoch: 3886 avg_cer= 0.019230770\n",
+ "Epoch: 3887 avg_cer= 0.019230770\n",
+ "Epoch: 3888 avg_cer= 0.019230770\n",
+ "Epoch: 3889 avg_cer= 0.019230770\n",
+ "Epoch: 3890 avg_cer= 0.019230770\n",
+ "Epoch: 3891 avg_cer= 0.019230770\n",
+ "Epoch: 3892 avg_cer= 0.019230770\n",
+ "Epoch: 3893 avg_cer= 0.019230770\n",
+ "Epoch: 3894 avg_cer= 0.019230770\n",
+ "Epoch: 3895 avg_cer= 0.019230770\n",
+ "Epoch: 3896 avg_cer= 0.019230770\n",
+ "Epoch: 3897 avg_cer= 0.019230770\n",
+ "Epoch: 3898 avg_cer= 0.019230770\n",
+ "Epoch: 3899 avg_cer= 0.019230770\n",
+ "Epoch: 3900 avg_cer= 0.019230770\n",
+ "Epoch: 3901 avg_cer= 0.019230770\n",
+ "Epoch: 3902 avg_cer= 0.019230770\n",
+ "Epoch: 3903 avg_cer= 0.019230770\n",
+ "Epoch: 3904 avg_cer= 0.019230770\n",
+ "Epoch: 3905 avg_cer= 0.019230770\n",
+ "Epoch: 3906 avg_cer= 0.019230770\n",
+ "Epoch: 3907 avg_cer= 0.019230770\n",
+ "Epoch: 3908 avg_cer= 0.019230770\n",
+ "Epoch: 3909 avg_cer= 0.019230770\n",
+ "Epoch: 3910 avg_cer= 0.019230770\n",
+ "Epoch: 3911 avg_cer= 0.019230770\n",
+ "Epoch: 3912 avg_cer= 0.019230770\n",
+ "Epoch: 3913 avg_cer= 0.019230770\n",
+ "Epoch: 3914 avg_cer= 0.019230770\n",
+ "Epoch: 3915 avg_cer= 0.019230770\n",
+ "Epoch: 3916 avg_cer= 0.019230770\n",
+ "Epoch: 3917 avg_cer= 0.019230770\n",
+ "Epoch: 3918 avg_cer= 0.019230770\n",
+ "Epoch: 3919 avg_cer= 0.019230770\n",
+ "Epoch: 3920 avg_cer= 0.019230770\n",
+ "Epoch: 3921 avg_cer= 0.019230770\n",
+ "Epoch: 3922 avg_cer= 0.019230770\n",
+ "Epoch: 3923 avg_cer= 0.019230770\n",
+ "Epoch: 3924 avg_cer= 0.019230770\n",
+ "Epoch: 3925 avg_cer= 0.019230770\n",
+ "Epoch: 3926 avg_cer= 0.019230770\n",
+ "Epoch: 3927 avg_cer= 0.019230770\n",
+ "Epoch: 3928 avg_cer= 0.019230770\n",
+ "Epoch: 3929 avg_cer= 0.019230770\n",
+ "Epoch: 3930 avg_cer= 0.019230770\n",
+ "Epoch: 3931 avg_cer= 0.019230770\n",
+ "Epoch: 3932 avg_cer= 0.019230770\n",
+ "Epoch: 3933 avg_cer= 0.019230770\n",
+ "Epoch: 3934 avg_cer= 0.019230770\n",
+ "Epoch: 3935 avg_cer= 0.019230770\n",
+ "Epoch: 3936 avg_cer= 0.019230770\n",
+ "Epoch: 3937 avg_cer= 0.019230770\n",
+ "Epoch: 3938 avg_cer= 0.019230770\n",
+ "Epoch: 3939 avg_cer= 0.019230770\n",
+ "Epoch: 3940 avg_cer= 0.019230770\n",
+ "Epoch: 3941 avg_cer= 0.019230770\n",
+ "Epoch: 3942 avg_cer= 0.019230770\n",
+ "Epoch: 3943 avg_cer= 0.019230770\n",
+ "Epoch: 3944 avg_cer= 0.019230770\n",
+ "Epoch: 3945 avg_cer= 0.019230770\n",
+ "Epoch: 3946 avg_cer= 0.019230770\n",
+ "Epoch: 3947 avg_cer= 0.019230770\n",
+ "Epoch: 3948 avg_cer= 0.019230770\n",
+ "Epoch: 3949 avg_cer= 0.019230770\n",
+ "Epoch: 3950 avg_cer= 0.019230770\n",
+ "Epoch: 3951 avg_cer= 0.019230770\n",
+ "Epoch: 3952 avg_cer= 0.019230770\n",
+ "Epoch: 3953 avg_cer= 0.019230770\n",
+ "Epoch: 3954 avg_cer= 0.019230770\n",
+ "Epoch: 3955 avg_cer= 0.019230770\n",
+ "Epoch: 3956 avg_cer= 0.019230770\n",
+ "Epoch: 3957 avg_cer= 0.019230770\n",
+ "Epoch: 3958 avg_cer= 0.019230770\n",
+ "Epoch: 3959 avg_cer= 0.019230770\n",
+ "Epoch: 3960 avg_cer= 0.019230770\n",
+ "Epoch: 3961 avg_cer= 0.019230770\n",
+ "Epoch: 3962 avg_cer= 0.019230770\n",
+ "Epoch: 3963 avg_cer= 0.019230770\n",
+ "Epoch: 3964 avg_cer= 0.019230770\n",
+ "Epoch: 3965 avg_cer= 0.019230770\n",
+ "Epoch: 3966 avg_cer= 0.019230770\n",
+ "Epoch: 3967 avg_cer= 0.019230770\n",
+ "Epoch: 3968 avg_cer= 0.019230770\n",
+ "Epoch: 3969 avg_cer= 0.019230770\n",
+ "Epoch: 3970 avg_cer= 0.019230770\n",
+ "Epoch: 3971 avg_cer= 0.019230770\n",
+ "Epoch: 3972 avg_cer= 0.019230770\n",
+ "Epoch: 3973 avg_cer= 0.019230770\n",
+ "Epoch: 3974 avg_cer= 0.019230770\n",
+ "Epoch: 3975 avg_cer= 0.019230770\n",
+ "Epoch: 3976 avg_cer= 0.019230770\n",
+ "Epoch: 3977 avg_cer= 0.019230770\n",
+ "Epoch: 3978 avg_cer= 0.019230770\n",
+ "Epoch: 3979 avg_cer= 0.019230770\n",
+ "Epoch: 3980 avg_cer= 0.019230770\n",
+ "Epoch: 3981 avg_cer= 0.019230770\n",
+ "Epoch: 3982 avg_cer= 0.019230770\n",
+ "Epoch: 3983 avg_cer= 0.019230770\n",
+ "Epoch: 3984 avg_cer= 0.019230770\n",
+ "Epoch: 3985 avg_cer= 0.019230770\n",
+ "Epoch: 3986 avg_cer= 0.019230770\n",
+ "Epoch: 3987 avg_cer= 0.019230770\n",
+ "Epoch: 3988 avg_cer= 0.019230770\n",
+ "Epoch: 3989 avg_cer= 0.019230770\n",
+ "Epoch: 3990 avg_cer= 0.019230770\n",
+ "Epoch: 3991 avg_cer= 0.019230770\n",
+ "Epoch: 3992 avg_cer= 0.019230770\n",
+ "Epoch: 3993 avg_cer= 0.019230770\n",
+ "Epoch: 3994 avg_cer= 0.019230770\n",
+ "Epoch: 3995 avg_cer= 0.019230770\n",
+ "Epoch: 3996 avg_cer= 0.019230770\n",
+ "Epoch: 3997 avg_cer= 0.019230770\n",
+ "Epoch: 3998 avg_cer= 0.019230770\n",
+ "Epoch: 3999 avg_cer= 0.019230770\n",
+ "Epoch: 4000 avg_cer= 0.019230770\n",
+ "Epoch: 4001 avg_cer= 0.019230770\n",
+ "Epoch: 4002 avg_cer= 0.019230770\n",
+ "Epoch: 4003 avg_cer= 0.019230770\n",
+ "Epoch: 4004 avg_cer= 0.019230770\n",
+ "Epoch: 4005 avg_cer= 0.019230770\n",
+ "Epoch: 4006 avg_cer= 0.019230770\n",
+ "Epoch: 4007 avg_cer= 0.019230770\n",
+ "Epoch: 4008 avg_cer= 0.019230770\n",
+ "Epoch: 4009 avg_cer= 0.019230770\n",
+ "Epoch: 4010 avg_cer= 0.019230770\n",
+ "Epoch: 4011 avg_cer= 0.019230770\n",
+ "Epoch: 4012 avg_cer= 0.019230770\n",
+ "Epoch: 4013 avg_cer= 0.019230770\n",
+ "Epoch: 4014 avg_cer= 0.019230770\n",
+ "Epoch: 4015 avg_cer= 0.019230770\n",
+ "Epoch: 4016 avg_cer= 0.019230770\n",
+ "Epoch: 4017 avg_cer= 0.019230770\n",
+ "Epoch: 4018 avg_cer= 0.019230770\n",
+ "Epoch: 4019 avg_cer= 0.019230770\n",
+ "Epoch: 4020 avg_cer= 0.019230770\n",
+ "Epoch: 4021 avg_cer= 0.019230770\n",
+ "Epoch: 4022 avg_cer= 0.019230770\n",
+ "Epoch: 4023 avg_cer= 0.019230770\n",
+ "Epoch: 4024 avg_cer= 0.019230770\n",
+ "Epoch: 4025 avg_cer= 0.019230770\n",
+ "Epoch: 4026 avg_cer= 0.019230770\n",
+ "Epoch: 4027 avg_cer= 0.019230770\n",
+ "Epoch: 4028 avg_cer= 0.019230770\n",
+ "Epoch: 4029 avg_cer= 0.019230770\n",
+ "Epoch: 4030 avg_cer= 0.019230770\n",
+ "Epoch: 4031 avg_cer= 0.019230770\n",
+ "Epoch: 4032 avg_cer= 0.019230770\n",
+ "Epoch: 4033 avg_cer= 0.019230770\n",
+ "Epoch: 4034 avg_cer= 0.019230770\n",
+ "Epoch: 4035 avg_cer= 0.019230770\n",
+ "Epoch: 4036 avg_cer= 0.019230770\n",
+ "Epoch: 4037 avg_cer= 0.019230770\n",
+ "Epoch: 4038 avg_cer= 0.019230770\n",
+ "Epoch: 4039 avg_cer= 0.019230770\n",
+ "Epoch: 4040 avg_cer= 0.019230770\n",
+ "Epoch: 4041 avg_cer= 0.019230770\n",
+ "Epoch: 4042 avg_cer= 0.019230770\n",
+ "Epoch: 4043 avg_cer= 0.019230770\n",
+ "Epoch: 4044 avg_cer= 0.019230770\n",
+ "Epoch: 4045 avg_cer= 0.019230770\n",
+ "Epoch: 4046 avg_cer= 0.019230770\n",
+ "Epoch: 4047 avg_cer= 0.019230770\n",
+ "Epoch: 4048 avg_cer= 0.019230770\n",
+ "Epoch: 4049 avg_cer= 0.019230770\n",
+ "Epoch: 4050 avg_cer= 0.019230770\n",
+ "Epoch: 4051 avg_cer= 0.019230770\n",
+ "Epoch: 4052 avg_cer= 0.019230770\n",
+ "Epoch: 4053 avg_cer= 0.019230770\n",
+ "Epoch: 4054 avg_cer= 0.019230770\n",
+ "Epoch: 4055 avg_cer= 0.019230770\n",
+ "Epoch: 4056 avg_cer= 0.019230770\n",
+ "Epoch: 4057 avg_cer= 0.019230770\n",
+ "Epoch: 4058 avg_cer= 0.019230770\n",
+ "Epoch: 4059 avg_cer= 0.019230770\n",
+ "Epoch: 4060 avg_cer= 0.019230770\n",
+ "Epoch: 4061 avg_cer= 0.019230770\n",
+ "Epoch: 4062 avg_cer= 0.019230770\n",
+ "Epoch: 4063 avg_cer= 0.019230770\n",
+ "Epoch: 4064 avg_cer= 0.019230770\n",
+ "Epoch: 4065 avg_cer= 0.019230770\n",
+ "Epoch: 4066 avg_cer= 0.019230770\n",
+ "Epoch: 4067 avg_cer= 0.019230770\n",
+ "Epoch: 4068 avg_cer= 0.019230770\n",
+ "Epoch: 4069 avg_cer= 0.019230770\n",
+ "Epoch: 4070 avg_cer= 0.019230770\n",
+ "Epoch: 4071 avg_cer= 0.019230770\n",
+ "Epoch: 4072 avg_cer= 0.019230770\n",
+ "Epoch: 4073 avg_cer= 0.019230770\n",
+ "Epoch: 4074 avg_cer= 0.019230770\n",
+ "Epoch: 4075 avg_cer= 0.019230770\n",
+ "Epoch: 4076 avg_cer= 0.019230770\n",
+ "Epoch: 4077 avg_cer= 0.019230770\n",
+ "Epoch: 4078 avg_cer= 0.019230770\n",
+ "Epoch: 4079 avg_cer= 0.019230770\n",
+ "Epoch: 4080 avg_cer= 0.019230770\n",
+ "Epoch: 4081 avg_cer= 0.019230770\n",
+ "Epoch: 4082 avg_cer= 0.019230770\n",
+ "Epoch: 4083 avg_cer= 0.019230770\n",
+ "Epoch: 4084 avg_cer= 0.019230770\n",
+ "Epoch: 4085 avg_cer= 0.019230770\n",
+ "Epoch: 4086 avg_cer= 0.019230770\n",
+ "Epoch: 4087 avg_cer= 0.019230770\n",
+ "Epoch: 4088 avg_cer= 0.019230770\n",
+ "Epoch: 4089 avg_cer= 0.019230770\n",
+ "Epoch: 4090 avg_cer= 0.019230770\n",
+ "Epoch: 4091 avg_cer= 0.019230770\n",
+ "Epoch: 4092 avg_cer= 0.019230770\n",
+ "Epoch: 4093 avg_cer= 0.019230770\n",
+ "Epoch: 4094 avg_cer= 0.019230770\n",
+ "Epoch: 4095 avg_cer= 0.019230770\n",
+ "Epoch: 4096 avg_cer= 0.019230770\n",
+ "Epoch: 4097 avg_cer= 0.019230770\n",
+ "Epoch: 4098 avg_cer= 0.019230770\n",
+ "Epoch: 4099 avg_cer= 0.019230770\n",
+ "Epoch: 4100 avg_cer= 0.019230770\n",
+ "Epoch: 4101 avg_cer= 0.019230770\n",
+ "Epoch: 4102 avg_cer= 0.019230770\n",
+ "Epoch: 4103 avg_cer= 0.019230770\n",
+ "Epoch: 4104 avg_cer= 0.019230770\n",
+ "Epoch: 4105 avg_cer= 0.019230770\n",
+ "Epoch: 4106 avg_cer= 0.019230770\n",
+ "Epoch: 4107 avg_cer= 0.019230770\n",
+ "Epoch: 4108 avg_cer= 0.019230770\n",
+ "Epoch: 4109 avg_cer= 0.019230770\n",
+ "Epoch: 4110 avg_cer= 0.019230770\n",
+ "Epoch: 4111 avg_cer= 0.019230770\n",
+ "Epoch: 4112 avg_cer= 0.019230770\n",
+ "Epoch: 4113 avg_cer= 0.019230770\n",
+ "Epoch: 4114 avg_cer= 0.019230770\n",
+ "Epoch: 4115 avg_cer= 0.019230770\n",
+ "Epoch: 4116 avg_cer= 0.019230770\n",
+ "Epoch: 4117 avg_cer= 0.019230770\n",
+ "Epoch: 4118 avg_cer= 0.019230770\n",
+ "Epoch: 4119 avg_cer= 0.019230770\n",
+ "Epoch: 4120 avg_cer= 0.019230770\n",
+ "Epoch: 4121 avg_cer= 0.019230770\n",
+ "Epoch: 4122 avg_cer= 0.019230770\n",
+ "Epoch: 4123 avg_cer= 0.019230770\n",
+ "Epoch: 4124 avg_cer= 0.019230770\n",
+ "Epoch: 4125 avg_cer= 0.019230770\n",
+ "Epoch: 4126 avg_cer= 0.019230770\n",
+ "Epoch: 4127 avg_cer= 0.019230770\n",
+ "Epoch: 4128 avg_cer= 0.019230770\n",
+ "Epoch: 4129 avg_cer= 0.019230770\n",
+ "Epoch: 4130 avg_cer= 0.019230770\n",
+ "Epoch: 4131 avg_cer= 0.019230770\n",
+ "Epoch: 4132 avg_cer= 0.019230770\n",
+ "Epoch: 4133 avg_cer= 0.019230770\n",
+ "Epoch: 4134 avg_cer= 0.019230770\n",
+ "Epoch: 4135 avg_cer= 0.019230770\n",
+ "Epoch: 4136 avg_cer= 0.019230770\n",
+ "Epoch: 4137 avg_cer= 0.019230770\n",
+ "Epoch: 4138 avg_cer= 0.019230770\n",
+ "Epoch: 4139 avg_cer= 0.019230770\n",
+ "Epoch: 4140 avg_cer= 0.019230770\n",
+ "Epoch: 4141 avg_cer= 0.019230770\n",
+ "Epoch: 4142 avg_cer= 0.019230770\n",
+ "Epoch: 4143 avg_cer= 0.019230770\n",
+ "Epoch: 4144 avg_cer= 0.019230770\n",
+ "Epoch: 4145 avg_cer= 0.019230770\n",
+ "Epoch: 4146 avg_cer= 0.019230770\n",
+ "Epoch: 4147 avg_cer= 0.019230770\n",
+ "Epoch: 4148 avg_cer= 0.019230770\n",
+ "Epoch: 4149 avg_cer= 0.019230770\n",
+ "Epoch: 4150 avg_cer= 0.019230770\n",
+ "Epoch: 4151 avg_cer= 0.019230770\n",
+ "Epoch: 4152 avg_cer= 0.019230770\n",
+ "Epoch: 4153 avg_cer= 0.019230770\n",
+ "Epoch: 4154 avg_cer= 0.019230770\n",
+ "Epoch: 4155 avg_cer= 0.019230770\n",
+ "Epoch: 4156 avg_cer= 0.019230770\n",
+ "Epoch: 4157 avg_cer= 0.019230770\n",
+ "Epoch: 4158 avg_cer= 0.019230770\n",
+ "Epoch: 4159 avg_cer= 0.019230770\n",
+ "Epoch: 4160 avg_cer= 0.019230770\n",
+ "Epoch: 4161 avg_cer= 0.019230770\n",
+ "Epoch: 4162 avg_cer= 0.019230770\n",
+ "Epoch: 4163 avg_cer= 0.019230770\n",
+ "Epoch: 4164 avg_cer= 0.019230770\n",
+ "Epoch: 4165 avg_cer= 0.019230770\n",
+ "Epoch: 4166 avg_cer= 0.019230770\n",
+ "Epoch: 4167 avg_cer= 0.019230770\n",
+ "Epoch: 4168 avg_cer= 0.019230770\n",
+ "Epoch: 4169 avg_cer= 0.019230770\n",
+ "Epoch: 4170 avg_cer= 0.019230770\n",
+ "Epoch: 4171 avg_cer= 0.019230770\n",
+ "Epoch: 4172 avg_cer= 0.019230770\n",
+ "Epoch: 4173 avg_cer= 0.019230770\n",
+ "Epoch: 4174 avg_cer= 0.019230770\n",
+ "Epoch: 4175 avg_cer= 0.019230770\n",
+ "Epoch: 4176 avg_cer= 0.019230770\n",
+ "Epoch: 4177 avg_cer= 0.019230770\n",
+ "Epoch: 4178 avg_cer= 0.019230770\n",
+ "Epoch: 4179 avg_cer= 0.019230770\n",
+ "Epoch: 4180 avg_cer= 0.019230770\n",
+ "Epoch: 4181 avg_cer= 0.019230770\n",
+ "Epoch: 4182 avg_cer= 0.019230770\n",
+ "Epoch: 4183 avg_cer= 0.019230770\n",
+ "Epoch: 4184 avg_cer= 0.019230770\n",
+ "Epoch: 4185 avg_cer= 0.019230770\n",
+ "Epoch: 4186 avg_cer= 0.019230770\n",
+ "Epoch: 4187 avg_cer= 0.019230770\n",
+ "Epoch: 4188 avg_cer= 0.019230770\n",
+ "Epoch: 4189 avg_cer= 0.019230770\n",
+ "Epoch: 4190 avg_cer= 0.019230770\n",
+ "Epoch: 4191 avg_cer= 0.019230770\n",
+ "Epoch: 4192 avg_cer= 0.019230770\n",
+ "Epoch: 4193 avg_cer= 0.019230770\n",
+ "Epoch: 4194 avg_cer= 0.019230770\n",
+ "Epoch: 4195 avg_cer= 0.019230770\n",
+ "Epoch: 4196 avg_cer= 0.019230770\n",
+ "Epoch: 4197 avg_cer= 0.019230770\n",
+ "Epoch: 4198 avg_cer= 0.019230770\n",
+ "Epoch: 4199 avg_cer= 0.019230770\n",
+ "Epoch: 4200 avg_cer= 0.019230770\n",
+ "Epoch: 4201 avg_cer= 0.019230770\n",
+ "Epoch: 4202 avg_cer= 0.019230770\n",
+ "Epoch: 4203 avg_cer= 0.019230770\n",
+ "Epoch: 4204 avg_cer= 0.019230770\n",
+ "Epoch: 4205 avg_cer= 0.019230770\n",
+ "Epoch: 4206 avg_cer= 0.019230770\n",
+ "Epoch: 4207 avg_cer= 0.019230770\n",
+ "Epoch: 4208 avg_cer= 0.019230770\n",
+ "Epoch: 4209 avg_cer= 0.019230770\n",
+ "Epoch: 4210 avg_cer= 0.019230770\n",
+ "Epoch: 4211 avg_cer= 0.019230770\n",
+ "Epoch: 4212 avg_cer= 0.019230770\n",
+ "Epoch: 4213 avg_cer= 0.019230770\n",
+ "Epoch: 4214 avg_cer= 0.019230770\n",
+ "Epoch: 4215 avg_cer= 0.019230770\n",
+ "Epoch: 4216 avg_cer= 0.019230770\n",
+ "Epoch: 4217 avg_cer= 0.019230770\n",
+ "Epoch: 4218 avg_cer= 0.019230770\n",
+ "Epoch: 4219 avg_cer= 0.019230770\n",
+ "Epoch: 4220 avg_cer= 0.019230770\n",
+ "Epoch: 4221 avg_cer= 0.019230770\n",
+ "Epoch: 4222 avg_cer= 0.019230770\n",
+ "Epoch: 4223 avg_cer= 0.019230770\n",
+ "Epoch: 4224 avg_cer= 0.019230770\n",
+ "Epoch: 4225 avg_cer= 0.019230770\n",
+ "Epoch: 4226 avg_cer= 0.019230770\n",
+ "Epoch: 4227 avg_cer= 0.019230770\n",
+ "Epoch: 4228 avg_cer= 0.019230770\n",
+ "Epoch: 4229 avg_cer= 0.019230770\n",
+ "Epoch: 4230 avg_cer= 0.019230770\n",
+ "Epoch: 4231 avg_cer= 0.019230770\n",
+ "Epoch: 4232 avg_cer= 0.019230770\n",
+ "Epoch: 4233 avg_cer= 0.019230770\n",
+ "Epoch: 4234 avg_cer= 0.019230770\n",
+ "Epoch: 4235 avg_cer= 0.019230770\n",
+ "Epoch: 4236 avg_cer= 0.019230770\n",
+ "Epoch: 4237 avg_cer= 0.019230770\n",
+ "Epoch: 4238 avg_cer= 0.019230770\n",
+ "Epoch: 4239 avg_cer= 0.019230770\n",
+ "Epoch: 4240 avg_cer= 0.019230770\n",
+ "Epoch: 4241 avg_cer= 0.019230770\n",
+ "Epoch: 4242 avg_cer= 0.019230770\n",
+ "Epoch: 4243 avg_cer= 0.019230770\n",
+ "Epoch: 4244 avg_cer= 0.019230770\n",
+ "Epoch: 4245 avg_cer= 0.019230770\n",
+ "Epoch: 4246 avg_cer= 0.019230770\n",
+ "Epoch: 4247 avg_cer= 0.019230770\n",
+ "Epoch: 4248 avg_cer= 0.019230770\n",
+ "Epoch: 4249 avg_cer= 0.019230770\n",
+ "Epoch: 4250 avg_cer= 0.019230770\n",
+ "Epoch: 4251 avg_cer= 0.019230770\n",
+ "Epoch: 4252 avg_cer= 0.019230770\n",
+ "Epoch: 4253 avg_cer= 0.019230770\n",
+ "Epoch: 4254 avg_cer= 0.019230770\n",
+ "Epoch: 4255 avg_cer= 0.019230770\n",
+ "Epoch: 4256 avg_cer= 0.019230770\n",
+ "Epoch: 4257 avg_cer= 0.019230770\n",
+ "Epoch: 4258 avg_cer= 0.019230770\n",
+ "Epoch: 4259 avg_cer= 0.019230770\n",
+ "Epoch: 4260 avg_cer= 0.019230770\n",
+ "Epoch: 4261 avg_cer= 0.019230770\n",
+ "Epoch: 4262 avg_cer= 0.019230770\n",
+ "Epoch: 4263 avg_cer= 0.019230770\n",
+ "Epoch: 4264 avg_cer= 0.019230770\n",
+ "Epoch: 4265 avg_cer= 0.019230770\n",
+ "Epoch: 4266 avg_cer= 0.019230770\n",
+ "Epoch: 4267 avg_cer= 0.019230770\n",
+ "Epoch: 4268 avg_cer= 0.019230770\n",
+ "Epoch: 4269 avg_cer= 0.019230770\n",
+ "Epoch: 4270 avg_cer= 0.019230770\n",
+ "Epoch: 4271 avg_cer= 0.019230770\n",
+ "Epoch: 4272 avg_cer= 0.019230770\n",
+ "Epoch: 4273 avg_cer= 0.019230770\n",
+ "Epoch: 4274 avg_cer= 0.019230770\n",
+ "Epoch: 4275 avg_cer= 0.019230770\n",
+ "Epoch: 4276 avg_cer= 0.019230770\n",
+ "Epoch: 4277 avg_cer= 0.019230770\n",
+ "Epoch: 4278 avg_cer= 0.019230770\n",
+ "Epoch: 4279 avg_cer= 0.019230770\n",
+ "Epoch: 4280 avg_cer= 0.019230770\n",
+ "Epoch: 4281 avg_cer= 0.019230770\n",
+ "Epoch: 4282 avg_cer= 0.019230770\n",
+ "Epoch: 4283 avg_cer= 0.019230770\n",
+ "Epoch: 4284 avg_cer= 0.019230770\n",
+ "Epoch: 4285 avg_cer= 0.019230770\n",
+ "Epoch: 4286 avg_cer= 0.019230770\n",
+ "Epoch: 4287 avg_cer= 0.019230770\n",
+ "Epoch: 4288 avg_cer= 0.019230770\n",
+ "Epoch: 4289 avg_cer= 0.019230770\n",
+ "Epoch: 4290 avg_cer= 0.019230770\n",
+ "Epoch: 4291 avg_cer= 0.019230770\n",
+ "Epoch: 4292 avg_cer= 0.019230770\n",
+ "Epoch: 4293 avg_cer= 0.019230770\n",
+ "Epoch: 4294 avg_cer= 0.019230770\n",
+ "Epoch: 4295 avg_cer= 0.019230770\n",
+ "Epoch: 4296 avg_cer= 0.019230770\n",
+ "Epoch: 4297 avg_cer= 0.019230770\n",
+ "Epoch: 4298 avg_cer= 0.019230770\n",
+ "Epoch: 4299 avg_cer= 0.019230770\n",
+ "Epoch: 4300 avg_cer= 0.019230770\n",
+ "Epoch: 4301 avg_cer= 0.019230770\n",
+ "Epoch: 4302 avg_cer= 0.019230770\n",
+ "Epoch: 4303 avg_cer= 0.019230770\n",
+ "Epoch: 4304 avg_cer= 0.019230770\n",
+ "Epoch: 4305 avg_cer= 0.019230770\n",
+ "Epoch: 4306 avg_cer= 0.019230770\n",
+ "Epoch: 4307 avg_cer= 0.019230770\n",
+ "Epoch: 4308 avg_cer= 0.019230770\n",
+ "Epoch: 4309 avg_cer= 0.019230770\n",
+ "Epoch: 4310 avg_cer= 0.019230770\n",
+ "Epoch: 4311 avg_cer= 0.019230770\n",
+ "Epoch: 4312 avg_cer= 0.019230770\n",
+ "Epoch: 4313 avg_cer= 0.019230770\n",
+ "Epoch: 4314 avg_cer= 0.019230770\n",
+ "Epoch: 4315 avg_cer= 0.019230770\n",
+ "Epoch: 4316 avg_cer= 0.019230770\n",
+ "Epoch: 4317 avg_cer= 0.019230770\n",
+ "Epoch: 4318 avg_cer= 0.019230770\n",
+ "Epoch: 4319 avg_cer= 0.019230770\n",
+ "Epoch: 4320 avg_cer= 0.019230770\n",
+ "Epoch: 4321 avg_cer= 0.019230770\n",
+ "Epoch: 4322 avg_cer= 0.019230770\n",
+ "Epoch: 4323 avg_cer= 0.019230770\n",
+ "Epoch: 4324 avg_cer= 0.019230770\n",
+ "Epoch: 4325 avg_cer= 0.019230770\n",
+ "Epoch: 4326 avg_cer= 0.019230770\n",
+ "Epoch: 4327 avg_cer= 0.019230770\n",
+ "Epoch: 4328 avg_cer= 0.019230770\n",
+ "Epoch: 4329 avg_cer= 0.019230770\n",
+ "Epoch: 4330 avg_cer= 0.019230770\n",
+ "Epoch: 4331 avg_cer= 0.019230770\n",
+ "Epoch: 4332 avg_cer= 0.019230770\n",
+ "Epoch: 4333 avg_cer= 0.019230770\n",
+ "Epoch: 4334 avg_cer= 0.019230770\n",
+ "Epoch: 4335 avg_cer= 0.019230770\n",
+ "Epoch: 4336 avg_cer= 0.019230770\n",
+ "Epoch: 4337 avg_cer= 0.019230770\n",
+ "Epoch: 4338 avg_cer= 0.019230770\n",
+ "Epoch: 4339 avg_cer= 0.019230770\n",
+ "Epoch: 4340 avg_cer= 0.019230770\n",
+ "Epoch: 4341 avg_cer= 0.019230770\n",
+ "Epoch: 4342 avg_cer= 0.019230770\n",
+ "Epoch: 4343 avg_cer= 0.019230770\n",
+ "Epoch: 4344 avg_cer= 0.019230770\n",
+ "Epoch: 4345 avg_cer= 0.019230770\n",
+ "Epoch: 4346 avg_cer= 0.019230770\n",
+ "Epoch: 4347 avg_cer= 0.019230770\n",
+ "Epoch: 4348 avg_cer= 0.019230770\n",
+ "Epoch: 4349 avg_cer= 0.019230770\n",
+ "Epoch: 4350 avg_cer= 0.019230770\n",
+ "Epoch: 4351 avg_cer= 0.019230770\n",
+ "Epoch: 4352 avg_cer= 0.019230770\n",
+ "Epoch: 4353 avg_cer= 0.019230770\n",
+ "Epoch: 4354 avg_cer= 0.019230770\n",
+ "Epoch: 4355 avg_cer= 0.019230770\n",
+ "Epoch: 4356 avg_cer= 0.019230770\n",
+ "Epoch: 4357 avg_cer= 0.019230770\n",
+ "Epoch: 4358 avg_cer= 0.019230770\n",
+ "Epoch: 4359 avg_cer= 0.019230770\n",
+ "Epoch: 4360 avg_cer= 0.019230770\n",
+ "Epoch: 4361 avg_cer= 0.019230770\n",
+ "Epoch: 4362 avg_cer= 0.019230770\n",
+ "Epoch: 4363 avg_cer= 0.019230770\n",
+ "Epoch: 4364 avg_cer= 0.019230770\n",
+ "Epoch: 4365 avg_cer= 0.019230770\n",
+ "Epoch: 4366 avg_cer= 0.019230770\n",
+ "Epoch: 4367 avg_cer= 0.019230770\n",
+ "Epoch: 4368 avg_cer= 0.019230770\n",
+ "Epoch: 4369 avg_cer= 0.019230770\n",
+ "Epoch: 4370 avg_cer= 0.019230770\n",
+ "Epoch: 4371 avg_cer= 0.019230770\n",
+ "Epoch: 4372 avg_cer= 0.019230770\n",
+ "Epoch: 4373 avg_cer= 0.019230770\n",
+ "Epoch: 4374 avg_cer= 0.019230770\n",
+ "Epoch: 4375 avg_cer= 0.019230770\n",
+ "Epoch: 4376 avg_cer= 0.019230770\n",
+ "Epoch: 4377 avg_cer= 0.019230770\n",
+ "Epoch: 4378 avg_cer= 0.019230770\n",
+ "Epoch: 4379 avg_cer= 0.019230770\n",
+ "Epoch: 4380 avg_cer= 0.019230770\n",
+ "Epoch: 4381 avg_cer= 0.019230770\n",
+ "Epoch: 4382 avg_cer= 0.019230770\n",
+ "Epoch: 4383 avg_cer= 0.019230770\n",
+ "Epoch: 4384 avg_cer= 0.019230770\n",
+ "Epoch: 4385 avg_cer= 0.019230770\n",
+ "Epoch: 4386 avg_cer= 0.019230770\n",
+ "Epoch: 4387 avg_cer= 0.019230770\n",
+ "Epoch: 4388 avg_cer= 0.019230770\n",
+ "Epoch: 4389 avg_cer= 0.019230770\n",
+ "Epoch: 4390 avg_cer= 0.019230770\n",
+ "Epoch: 4391 avg_cer= 0.019230770\n",
+ "Epoch: 4392 avg_cer= 0.019230770\n",
+ "Epoch: 4393 avg_cer= 0.019230770\n",
+ "Epoch: 4394 avg_cer= 0.019230770\n",
+ "Epoch: 4395 avg_cer= 0.019230770\n",
+ "Epoch: 4396 avg_cer= 0.019230770\n",
+ "Epoch: 4397 avg_cer= 0.019230770\n",
+ "Epoch: 4398 avg_cer= 0.019230770\n",
+ "Epoch: 4399 avg_cer= 0.019230770\n",
+ "Epoch: 4400 avg_cer= 0.019230770\n",
+ "Epoch: 4401 avg_cer= 0.019230770\n",
+ "Epoch: 4402 avg_cer= 0.019230770\n",
+ "Epoch: 4403 avg_cer= 0.019230770\n",
+ "Epoch: 4404 avg_cer= 0.019230770\n",
+ "Epoch: 4405 avg_cer= 0.019230770\n",
+ "Epoch: 4406 avg_cer= 0.019230770\n",
+ "Epoch: 4407 avg_cer= 0.019230770\n",
+ "Epoch: 4408 avg_cer= 0.019230770\n",
+ "Epoch: 4409 avg_cer= 0.019230770\n",
+ "Epoch: 4410 avg_cer= 0.019230770\n",
+ "Epoch: 4411 avg_cer= 0.019230770\n",
+ "Epoch: 4412 avg_cer= 0.019230770\n",
+ "Epoch: 4413 avg_cer= 0.019230770\n",
+ "Epoch: 4414 avg_cer= 0.019230770\n",
+ "Epoch: 4415 avg_cer= 0.019230770\n",
+ "Epoch: 4416 avg_cer= 0.019230770\n",
+ "Epoch: 4417 avg_cer= 0.019230770\n",
+ "Epoch: 4418 avg_cer= 0.019230770\n",
+ "Epoch: 4419 avg_cer= 0.019230770\n",
+ "Epoch: 4420 avg_cer= 0.019230770\n",
+ "Epoch: 4421 avg_cer= 0.019230770\n",
+ "Epoch: 4422 avg_cer= 0.019230770\n",
+ "Epoch: 4423 avg_cer= 0.019230770\n",
+ "Epoch: 4424 avg_cer= 0.019230770\n",
+ "Epoch: 4425 avg_cer= 0.019230770\n",
+ "Epoch: 4426 avg_cer= 0.019230770\n",
+ "Epoch: 4427 avg_cer= 0.019230770\n",
+ "Epoch: 4428 avg_cer= 0.019230770\n",
+ "Epoch: 4429 avg_cer= 0.019230770\n",
+ "Epoch: 4430 avg_cer= 0.019230770\n",
+ "Epoch: 4431 avg_cer= 0.019230770\n",
+ "Epoch: 4432 avg_cer= 0.019230770\n",
+ "Epoch: 4433 avg_cer= 0.019230770\n",
+ "Epoch: 4434 avg_cer= 0.019230770\n",
+ "Epoch: 4435 avg_cer= 0.019230770\n",
+ "Epoch: 4436 avg_cer= 0.019230770\n",
+ "Epoch: 4437 avg_cer= 0.019230770\n",
+ "Epoch: 4438 avg_cer= 0.019230770\n",
+ "Epoch: 4439 avg_cer= 0.019230770\n",
+ "Epoch: 4440 avg_cer= 0.019230770\n",
+ "Epoch: 4441 avg_cer= 0.019230770\n",
+ "Epoch: 4442 avg_cer= 0.019230770\n",
+ "Epoch: 4443 avg_cer= 0.019230770\n",
+ "Epoch: 4444 avg_cer= 0.019230770\n",
+ "Epoch: 4445 avg_cer= 0.019230770\n",
+ "Epoch: 4446 avg_cer= 0.019230770\n",
+ "Epoch: 4447 avg_cer= 0.019230770\n",
+ "Epoch: 4448 avg_cer= 0.019230770\n",
+ "Epoch: 4449 avg_cer= 0.019230770\n",
+ "Epoch: 4450 avg_cer= 0.019230770\n",
+ "Epoch: 4451 avg_cer= 0.019230770\n",
+ "Epoch: 4452 avg_cer= 0.019230770\n",
+ "Epoch: 4453 avg_cer= 0.019230770\n",
+ "Epoch: 4454 avg_cer= 0.019230770\n",
+ "Epoch: 4455 avg_cer= 0.019230770\n",
+ "Epoch: 4456 avg_cer= 0.019230770\n",
+ "Epoch: 4457 avg_cer= 0.019230770\n",
+ "Epoch: 4458 avg_cer= 0.019230770\n",
+ "Epoch: 4459 avg_cer= 0.019230770\n",
+ "Epoch: 4460 avg_cer= 0.019230770\n",
+ "Epoch: 4461 avg_cer= 0.019230770\n",
+ "Epoch: 4462 avg_cer= 0.019230770\n",
+ "Epoch: 4463 avg_cer= 0.019230770\n",
+ "Epoch: 4464 avg_cer= 0.019230770\n",
+ "Epoch: 4465 avg_cer= 0.019230770\n",
+ "Epoch: 4466 avg_cer= 0.019230770\n",
+ "Epoch: 4467 avg_cer= 0.019230770\n",
+ "Epoch: 4468 avg_cer= 0.019230770\n",
+ "Epoch: 4469 avg_cer= 0.019230770\n",
+ "Epoch: 4470 avg_cer= 0.019230770\n",
+ "Epoch: 4471 avg_cer= 0.019230770\n",
+ "Epoch: 4472 avg_cer= 0.019230770\n",
+ "Epoch: 4473 avg_cer= 0.019230770\n",
+ "Epoch: 4474 avg_cer= 0.019230770\n",
+ "Epoch: 4475 avg_cer= 0.019230770\n",
+ "Epoch: 4476 avg_cer= 0.019230770\n",
+ "Epoch: 4477 avg_cer= 0.019230770\n",
+ "Epoch: 4478 avg_cer= 0.019230770\n",
+ "Epoch: 4479 avg_cer= 0.019230770\n",
+ "Epoch: 4480 avg_cer= 0.019230770\n",
+ "Epoch: 4481 avg_cer= 0.019230770\n",
+ "Epoch: 4482 avg_cer= 0.019230770\n",
+ "Epoch: 4483 avg_cer= 0.019230770\n",
+ "Epoch: 4484 avg_cer= 0.019230770\n",
+ "Epoch: 4485 avg_cer= 0.019230770\n",
+ "Epoch: 4486 avg_cer= 0.019230770\n",
+ "Epoch: 4487 avg_cer= 0.019230770\n",
+ "Epoch: 4488 avg_cer= 0.019230770\n",
+ "Epoch: 4489 avg_cer= 0.019230770\n",
+ "Epoch: 4490 avg_cer= 0.019230770\n",
+ "Epoch: 4491 avg_cer= 0.019230770\n",
+ "Epoch: 4492 avg_cer= 0.019230770\n",
+ "Epoch: 4493 avg_cer= 0.019230770\n",
+ "Epoch: 4494 avg_cer= 0.019230770\n",
+ "Epoch: 4495 avg_cer= 0.019230770\n",
+ "Epoch: 4496 avg_cer= 0.019230770\n",
+ "Epoch: 4497 avg_cer= 0.019230770\n",
+ "Epoch: 4498 avg_cer= 0.019230770\n",
+ "Epoch: 4499 avg_cer= 0.019230770\n",
+ "Epoch: 4500 avg_cer= 0.019230770\n",
+ "Epoch: 4501 avg_cer= 0.019230770\n",
+ "Epoch: 4502 avg_cer= 0.019230770\n",
+ "Epoch: 4503 avg_cer= 0.019230770\n",
+ "Epoch: 4504 avg_cer= 0.019230770\n",
+ "Epoch: 4505 avg_cer= 0.019230770\n",
+ "Epoch: 4506 avg_cer= 0.019230770\n",
+ "Epoch: 4507 avg_cer= 0.019230770\n",
+ "Epoch: 4508 avg_cer= 0.019230770\n",
+ "Epoch: 4509 avg_cer= 0.019230770\n",
+ "Epoch: 4510 avg_cer= 0.019230770\n",
+ "Epoch: 4511 avg_cer= 0.019230770\n",
+ "Epoch: 4512 avg_cer= 0.019230770\n",
+ "Epoch: 4513 avg_cer= 0.019230770\n",
+ "Epoch: 4514 avg_cer= 0.019230770\n",
+ "Epoch: 4515 avg_cer= 0.019230770\n",
+ "Epoch: 4516 avg_cer= 0.019230770\n",
+ "Epoch: 4517 avg_cer= 0.019230770\n",
+ "Epoch: 4518 avg_cer= 0.019230770\n",
+ "Epoch: 4519 avg_cer= 0.019230770\n",
+ "Epoch: 4520 avg_cer= 0.019230770\n",
+ "Epoch: 4521 avg_cer= 0.019230770\n",
+ "Epoch: 4522 avg_cer= 0.019230770\n",
+ "Epoch: 4523 avg_cer= 0.019230770\n",
+ "Epoch: 4524 avg_cer= 0.019230770\n",
+ "Epoch: 4525 avg_cer= 0.019230770\n",
+ "Epoch: 4526 avg_cer= 0.019230770\n",
+ "Epoch: 4527 avg_cer= 0.019230770\n",
+ "Epoch: 4528 avg_cer= 0.019230770\n",
+ "Epoch: 4529 avg_cer= 0.019230770\n",
+ "Epoch: 4530 avg_cer= 0.019230770\n",
+ "Epoch: 4531 avg_cer= 0.019230770\n",
+ "Epoch: 4532 avg_cer= 0.019230770\n",
+ "Epoch: 4533 avg_cer= 0.019230770\n",
+ "Epoch: 4534 avg_cer= 0.019230770\n",
+ "Epoch: 4535 avg_cer= 0.019230770\n",
+ "Epoch: 4536 avg_cer= 0.019230770\n",
+ "Epoch: 4537 avg_cer= 0.019230770\n",
+ "Epoch: 4538 avg_cer= 0.019230770\n",
+ "Epoch: 4539 avg_cer= 0.019230770\n",
+ "Epoch: 4540 avg_cer= 0.019230770\n",
+ "Epoch: 4541 avg_cer= 0.019230770\n",
+ "Epoch: 4542 avg_cer= 0.019230770\n",
+ "Epoch: 4543 avg_cer= 0.019230770\n",
+ "Epoch: 4544 avg_cer= 0.019230770\n",
+ "Epoch: 4545 avg_cer= 0.019230770\n",
+ "Epoch: 4546 avg_cer= 0.019230770\n",
+ "Epoch: 4547 avg_cer= 0.019230770\n",
+ "Epoch: 4548 avg_cer= 0.019230770\n",
+ "Epoch: 4549 avg_cer= 0.019230770\n",
+ "Epoch: 4550 avg_cer= 0.019230770\n",
+ "Epoch: 4551 avg_cer= 0.019230770\n",
+ "Epoch: 4552 avg_cer= 0.019230770\n",
+ "Epoch: 4553 avg_cer= 0.019230770\n",
+ "Epoch: 4554 avg_cer= 0.019230770\n",
+ "Epoch: 4555 avg_cer= 0.019230770\n",
+ "Epoch: 4556 avg_cer= 0.019230770\n",
+ "Epoch: 4557 avg_cer= 0.019230770\n",
+ "Epoch: 4558 avg_cer= 0.019230770\n",
+ "Epoch: 4559 avg_cer= 0.019230770\n",
+ "Epoch: 4560 avg_cer= 0.019230770\n",
+ "Epoch: 4561 avg_cer= 0.019230770\n",
+ "Epoch: 4562 avg_cer= 0.019230770\n",
+ "Epoch: 4563 avg_cer= 0.019230770\n",
+ "Epoch: 4564 avg_cer= 0.019230770\n",
+ "Epoch: 4565 avg_cer= 0.019230770\n",
+ "Epoch: 4566 avg_cer= 0.019230770\n",
+ "Epoch: 4567 avg_cer= 0.019230770\n",
+ "Epoch: 4568 avg_cer= 0.019230770\n",
+ "Epoch: 4569 avg_cer= 0.019230770\n",
+ "Epoch: 4570 avg_cer= 0.019230770\n",
+ "Epoch: 4571 avg_cer= 0.019230770\n",
+ "Epoch: 4572 avg_cer= 0.019230770\n",
+ "Epoch: 4573 avg_cer= 0.019230770\n",
+ "Epoch: 4574 avg_cer= 0.019230770\n",
+ "Epoch: 4575 avg_cer= 0.019230770\n",
+ "Epoch: 4576 avg_cer= 0.019230770\n",
+ "Epoch: 4577 avg_cer= 0.019230770\n",
+ "Epoch: 4578 avg_cer= 0.019230770\n",
+ "Epoch: 4579 avg_cer= 0.019230770\n",
+ "Epoch: 4580 avg_cer= 0.019230770\n",
+ "Epoch: 4581 avg_cer= 0.019230770\n",
+ "Epoch: 4582 avg_cer= 0.019230770\n",
+ "Epoch: 4583 avg_cer= 0.019230770\n",
+ "Epoch: 4584 avg_cer= 0.019230770\n",
+ "Epoch: 4585 avg_cer= 0.019230770\n",
+ "Epoch: 4586 avg_cer= 0.019230770\n",
+ "Epoch: 4587 avg_cer= 0.019230770\n",
+ "Epoch: 4588 avg_cer= 0.019230770\n",
+ "Epoch: 4589 avg_cer= 0.019230770\n",
+ "Epoch: 4590 avg_cer= 0.019230770\n",
+ "Epoch: 4591 avg_cer= 0.019230770\n",
+ "Epoch: 4592 avg_cer= 0.019230770\n",
+ "Epoch: 4593 avg_cer= 0.019230770\n",
+ "Epoch: 4594 avg_cer= 0.019230770\n",
+ "Epoch: 4595 avg_cer= 0.019230770\n",
+ "Epoch: 4596 avg_cer= 0.019230770\n",
+ "Epoch: 4597 avg_cer= 0.019230770\n",
+ "Epoch: 4598 avg_cer= 0.019230770\n",
+ "Epoch: 4599 avg_cer= 0.019230770\n",
+ "Epoch: 4600 avg_cer= 0.019230770\n",
+ "Epoch: 4601 avg_cer= 0.019230770\n",
+ "Epoch: 4602 avg_cer= 0.019230770\n",
+ "Epoch: 4603 avg_cer= 0.019230770\n",
+ "Epoch: 4604 avg_cer= 0.019230770\n",
+ "Epoch: 4605 avg_cer= 0.019230770\n",
+ "Epoch: 4606 avg_cer= 0.019230770\n",
+ "Epoch: 4607 avg_cer= 0.019230770\n",
+ "Epoch: 4608 avg_cer= 0.019230770\n",
+ "Epoch: 4609 avg_cer= 0.019230770\n",
+ "Epoch: 4610 avg_cer= 0.019230770\n",
+ "Epoch: 4611 avg_cer= 0.019230770\n",
+ "Epoch: 4612 avg_cer= 0.019230770\n",
+ "Epoch: 4613 avg_cer= 0.019230770\n",
+ "Epoch: 4614 avg_cer= 0.019230770\n",
+ "Epoch: 4615 avg_cer= 0.019230770\n",
+ "Epoch: 4616 avg_cer= 0.019230770\n",
+ "Epoch: 4617 avg_cer= 0.019230770\n",
+ "Epoch: 4618 avg_cer= 0.019230770\n",
+ "Epoch: 4619 avg_cer= 0.019230770\n",
+ "Epoch: 4620 avg_cer= 0.019230770\n",
+ "Epoch: 4621 avg_cer= 0.019230770\n",
+ "Epoch: 4622 avg_cer= 0.019230770\n",
+ "Epoch: 4623 avg_cer= 0.019230770\n",
+ "Epoch: 4624 avg_cer= 0.019230770\n",
+ "Epoch: 4625 avg_cer= 0.019230770\n",
+ "Epoch: 4626 avg_cer= 0.019230770\n",
+ "Epoch: 4627 avg_cer= 0.019230770\n",
+ "Epoch: 4628 avg_cer= 0.019230770\n",
+ "Epoch: 4629 avg_cer= 0.019230770\n",
+ "Epoch: 4630 avg_cer= 0.019230770\n",
+ "Epoch: 4631 avg_cer= 0.019230770\n",
+ "Epoch: 4632 avg_cer= 0.019230770\n",
+ "Epoch: 4633 avg_cer= 0.019230770\n",
+ "Epoch: 4634 avg_cer= 0.019230770\n",
+ "Epoch: 4635 avg_cer= 0.019230770\n",
+ "Epoch: 4636 avg_cer= 0.019230770\n",
+ "Epoch: 4637 avg_cer= 0.019230770\n",
+ "Epoch: 4638 avg_cer= 0.019230770\n",
+ "Epoch: 4639 avg_cer= 0.019230770\n",
+ "Epoch: 4640 avg_cer= 0.019230770\n",
+ "Epoch: 4641 avg_cer= 0.019230770\n",
+ "Epoch: 4642 avg_cer= 0.019230770\n",
+ "Epoch: 4643 avg_cer= 0.019230770\n",
+ "Epoch: 4644 avg_cer= 0.019230770\n",
+ "Epoch: 4645 avg_cer= 0.019230770\n",
+ "Epoch: 4646 avg_cer= 0.019230770\n",
+ "Epoch: 4647 avg_cer= 0.019230770\n",
+ "Epoch: 4648 avg_cer= 0.019230770\n",
+ "Epoch: 4649 avg_cer= 0.019230770\n",
+ "Epoch: 4650 avg_cer= 0.019230770\n",
+ "Epoch: 4651 avg_cer= 0.019230770\n",
+ "Epoch: 4652 avg_cer= 0.019230770\n",
+ "Epoch: 4653 avg_cer= 0.019230770\n",
+ "Epoch: 4654 avg_cer= 0.019230770\n",
+ "Epoch: 4655 avg_cer= 0.019230770\n",
+ "Epoch: 4656 avg_cer= 0.019230770\n",
+ "Epoch: 4657 avg_cer= 0.019230770\n",
+ "Epoch: 4658 avg_cer= 0.019230770\n",
+ "Epoch: 4659 avg_cer= 0.019230770\n",
+ "Epoch: 4660 avg_cer= 0.019230770\n",
+ "Epoch: 4661 avg_cer= 0.019230770\n",
+ "Epoch: 4662 avg_cer= 0.019230770\n",
+ "Epoch: 4663 avg_cer= 0.019230770\n",
+ "Epoch: 4664 avg_cer= 0.019230770\n",
+ "Epoch: 4665 avg_cer= 0.019230770\n",
+ "Epoch: 4666 avg_cer= 0.019230770\n",
+ "Epoch: 4667 avg_cer= 0.019230770\n",
+ "Epoch: 4668 avg_cer= 0.019230770\n",
+ "Epoch: 4669 avg_cer= 0.019230770\n",
+ "Epoch: 4670 avg_cer= 0.019230770\n",
+ "Epoch: 4671 avg_cer= 0.019230770\n",
+ "Epoch: 4672 avg_cer= 0.019230770\n",
+ "Epoch: 4673 avg_cer= 0.019230770\n",
+ "Epoch: 4674 avg_cer= 0.019230770\n",
+ "Epoch: 4675 avg_cer= 0.019230770\n",
+ "Epoch: 4676 avg_cer= 0.019230770\n",
+ "Epoch: 4677 avg_cer= 0.019230770\n",
+ "Epoch: 4678 avg_cer= 0.019230770\n",
+ "Epoch: 4679 avg_cer= 0.019230770\n",
+ "Epoch: 4680 avg_cer= 0.019230770\n",
+ "Epoch: 4681 avg_cer= 0.019230770\n",
+ "Epoch: 4682 avg_cer= 0.019230770\n",
+ "Epoch: 4683 avg_cer= 0.019230770\n",
+ "Epoch: 4684 avg_cer= 0.019230770\n",
+ "Epoch: 4685 avg_cer= 0.019230770\n",
+ "Epoch: 4686 avg_cer= 0.019230770\n",
+ "Epoch: 4687 avg_cer= 0.019230770\n",
+ "Epoch: 4688 avg_cer= 0.019230770\n",
+ "Epoch: 4689 avg_cer= 0.019230770\n",
+ "Epoch: 4690 avg_cer= 0.019230770\n",
+ "Epoch: 4691 avg_cer= 0.019230770\n",
+ "Epoch: 4692 avg_cer= 0.019230770\n",
+ "Epoch: 4693 avg_cer= 0.019230770\n",
+ "Epoch: 4694 avg_cer= 0.019230770\n",
+ "Epoch: 4695 avg_cer= 0.019230770\n",
+ "Epoch: 4696 avg_cer= 0.019230770\n",
+ "Epoch: 4697 avg_cer= 0.019230770\n",
+ "Epoch: 4698 avg_cer= 0.019230770\n",
+ "Epoch: 4699 avg_cer= 0.019230770\n",
+ "Epoch: 4700 avg_cer= 0.019230770\n",
+ "Epoch: 4701 avg_cer= 0.019230770\n",
+ "Epoch: 4702 avg_cer= 0.019230770\n",
+ "Epoch: 4703 avg_cer= 0.019230770\n",
+ "Epoch: 4704 avg_cer= 0.019230770\n",
+ "Epoch: 4705 avg_cer= 0.019230770\n",
+ "Epoch: 4706 avg_cer= 0.019230770\n",
+ "Epoch: 4707 avg_cer= 0.019230770\n",
+ "Epoch: 4708 avg_cer= 0.019230770\n",
+ "Epoch: 4709 avg_cer= 0.019230770\n",
+ "Epoch: 4710 avg_cer= 0.019230770\n",
+ "Epoch: 4711 avg_cer= 0.019230770\n",
+ "Epoch: 4712 avg_cer= 0.019230770\n",
+ "Epoch: 4713 avg_cer= 0.019230770\n",
+ "Epoch: 4714 avg_cer= 0.019230770\n",
+ "Epoch: 4715 avg_cer= 0.019230770\n",
+ "Epoch: 4716 avg_cer= 0.019230770\n",
+ "Epoch: 4717 avg_cer= 0.019230770\n",
+ "Epoch: 4718 avg_cer= 0.019230770\n",
+ "Epoch: 4719 avg_cer= 0.019230770\n",
+ "Epoch: 4720 avg_cer= 0.019230770\n",
+ "Epoch: 4721 avg_cer= 0.019230770\n",
+ "Epoch: 4722 avg_cer= 0.019230770\n",
+ "Epoch: 4723 avg_cer= 0.019230770\n",
+ "Epoch: 4724 avg_cer= 0.019230770\n",
+ "Epoch: 4725 avg_cer= 0.019230770\n",
+ "Epoch: 4726 avg_cer= 0.019230770\n",
+ "Epoch: 4727 avg_cer= 0.019230770\n",
+ "Epoch: 4728 avg_cer= 0.019230770\n",
+ "Epoch: 4729 avg_cer= 0.019230770\n",
+ "Epoch: 4730 avg_cer= 0.019230770\n",
+ "Epoch: 4731 avg_cer= 0.019230770\n",
+ "Epoch: 4732 avg_cer= 0.019230770\n",
+ "Epoch: 4733 avg_cer= 0.019230770\n",
+ "Epoch: 4734 avg_cer= 0.019230770\n",
+ "Epoch: 4735 avg_cer= 0.019230770\n",
+ "Epoch: 4736 avg_cer= 0.019230770\n",
+ "Epoch: 4737 avg_cer= 0.019230770\n",
+ "Epoch: 4738 avg_cer= 0.019230770\n",
+ "Epoch: 4739 avg_cer= 0.019230770\n",
+ "Epoch: 4740 avg_cer= 0.019230770\n",
+ "Epoch: 4741 avg_cer= 0.019230770\n",
+ "Epoch: 4742 avg_cer= 0.019230770\n",
+ "Epoch: 4743 avg_cer= 0.019230770\n",
+ "Epoch: 4744 avg_cer= 0.019230770\n",
+ "Epoch: 4745 avg_cer= 0.019230770\n",
+ "Epoch: 4746 avg_cer= 0.019230770\n",
+ "Epoch: 4747 avg_cer= 0.019230770\n",
+ "Epoch: 4748 avg_cer= 0.019230770\n",
+ "Epoch: 4749 avg_cer= 0.019230770\n",
+ "Epoch: 4750 avg_cer= 0.019230770\n",
+ "Epoch: 4751 avg_cer= 0.019230770\n",
+ "Epoch: 4752 avg_cer= 0.019230770\n",
+ "Epoch: 4753 avg_cer= 0.019230770\n",
+ "Epoch: 4754 avg_cer= 0.019230770\n",
+ "Epoch: 4755 avg_cer= 0.019230770\n",
+ "Epoch: 4756 avg_cer= 0.019230770\n",
+ "Epoch: 4757 avg_cer= 0.019230770\n",
+ "Epoch: 4758 avg_cer= 0.019230770\n",
+ "Epoch: 4759 avg_cer= 0.019230770\n",
+ "Epoch: 4760 avg_cer= 0.019230770\n",
+ "Epoch: 4761 avg_cer= 0.019230770\n",
+ "Epoch: 4762 avg_cer= 0.019230770\n",
+ "Epoch: 4763 avg_cer= 0.019230770\n",
+ "Epoch: 4764 avg_cer= 0.019230770\n",
+ "Epoch: 4765 avg_cer= 0.019230770\n",
+ "Epoch: 4766 avg_cer= 0.019230770\n",
+ "Epoch: 4767 avg_cer= 0.019230770\n",
+ "Epoch: 4768 avg_cer= 0.019230770\n",
+ "Epoch: 4769 avg_cer= 0.019230770\n",
+ "Epoch: 4770 avg_cer= 0.019230770\n",
+ "Epoch: 4771 avg_cer= 0.019230770\n",
+ "Epoch: 4772 avg_cer= 0.019230770\n",
+ "Epoch: 4773 avg_cer= 0.019230770\n",
+ "Epoch: 4774 avg_cer= 0.019230770\n",
+ "Epoch: 4775 avg_cer= 0.019230770\n",
+ "Epoch: 4776 avg_cer= 0.019230770\n",
+ "Epoch: 4777 avg_cer= 0.019230770\n",
+ "Epoch: 4778 avg_cer= 0.019230770\n",
+ "Epoch: 4779 avg_cer= 0.019230770\n",
+ "Epoch: 4780 avg_cer= 0.019230770\n",
+ "Epoch: 4781 avg_cer= 0.019230770\n",
+ "Epoch: 4782 avg_cer= 0.019230770\n",
+ "Epoch: 4783 avg_cer= 0.019230770\n",
+ "Epoch: 4784 avg_cer= 0.019230770\n",
+ "Epoch: 4785 avg_cer= 0.019230770\n",
+ "Epoch: 4786 avg_cer= 0.019230770\n",
+ "Epoch: 4787 avg_cer= 0.019230770\n",
+ "Epoch: 4788 avg_cer= 0.019230770\n",
+ "Epoch: 4789 avg_cer= 0.019230770\n",
+ "Epoch: 4790 avg_cer= 0.019230770\n",
+ "Epoch: 4791 avg_cer= 0.019230770\n",
+ "Epoch: 4792 avg_cer= 0.019230770\n",
+ "Epoch: 4793 avg_cer= 0.019230770\n",
+ "Epoch: 4794 avg_cer= 0.019230770\n",
+ "Epoch: 4795 avg_cer= 0.019230770\n",
+ "Epoch: 4796 avg_cer= 0.019230770\n",
+ "Epoch: 4797 avg_cer= 0.019230770\n",
+ "Epoch: 4798 avg_cer= 0.019230770\n",
+ "Epoch: 4799 avg_cer= 0.019230770\n",
+ "Epoch: 4800 avg_cer= 0.019230770\n",
+ "Epoch: 4801 avg_cer= 0.019230770\n",
+ "Epoch: 4802 avg_cer= 0.019230770\n",
+ "Epoch: 4803 avg_cer= 0.019230770\n",
+ "Epoch: 4804 avg_cer= 0.019230770\n",
+ "Epoch: 4805 avg_cer= 0.019230770\n",
+ "Epoch: 4806 avg_cer= 0.019230770\n",
+ "Epoch: 4807 avg_cer= 0.019230770\n",
+ "Epoch: 4808 avg_cer= 0.019230770\n",
+ "Epoch: 4809 avg_cer= 0.019230770\n",
+ "Epoch: 4810 avg_cer= 0.019230770\n",
+ "Epoch: 4811 avg_cer= 0.019230770\n",
+ "Epoch: 4812 avg_cer= 0.019230770\n",
+ "Epoch: 4813 avg_cer= 0.019230770\n",
+ "Epoch: 4814 avg_cer= 0.019230770\n",
+ "Epoch: 4815 avg_cer= 0.019230770\n",
+ "Epoch: 4816 avg_cer= 0.019230770\n",
+ "Epoch: 4817 avg_cer= 0.019230770\n",
+ "Epoch: 4818 avg_cer= 0.019230770\n",
+ "Epoch: 4819 avg_cer= 0.019230770\n",
+ "Epoch: 4820 avg_cer= 0.019230770\n",
+ "Epoch: 4821 avg_cer= 0.019230770\n",
+ "Epoch: 4822 avg_cer= 0.019230770\n",
+ "Epoch: 4823 avg_cer= 0.019230770\n",
+ "Epoch: 4824 avg_cer= 0.019230770\n",
+ "Epoch: 4825 avg_cer= 0.019230770\n",
+ "Epoch: 4826 avg_cer= 0.019230770\n",
+ "Epoch: 4827 avg_cer= 0.019230770\n",
+ "Epoch: 4828 avg_cer= 0.019230770\n",
+ "Epoch: 4829 avg_cer= 0.019230770\n",
+ "Epoch: 4830 avg_cer= 0.019230770\n",
+ "Epoch: 4831 avg_cer= 0.019230770\n",
+ "Epoch: 4832 avg_cer= 0.019230770\n",
+ "Epoch: 4833 avg_cer= 0.019230770\n",
+ "Epoch: 4834 avg_cer= 0.019230770\n",
+ "Epoch: 4835 avg_cer= 0.019230770\n",
+ "Epoch: 4836 avg_cer= 0.019230770\n",
+ "Epoch: 4837 avg_cer= 0.019230770\n",
+ "Epoch: 4838 avg_cer= 0.019230770\n",
+ "Epoch: 4839 avg_cer= 0.019230770\n",
+ "Epoch: 4840 avg_cer= 0.019230770\n",
+ "Epoch: 4841 avg_cer= 0.019230770\n",
+ "Epoch: 4842 avg_cer= 0.019230770\n",
+ "Epoch: 4843 avg_cer= 0.019230770\n",
+ "Epoch: 4844 avg_cer= 0.019230770\n",
+ "Epoch: 4845 avg_cer= 0.019230770\n",
+ "Epoch: 4846 avg_cer= 0.019230770\n",
+ "Epoch: 4847 avg_cer= 0.019230770\n",
+ "Epoch: 4848 avg_cer= 0.019230770\n",
+ "Epoch: 4849 avg_cer= 0.019230770\n",
+ "Epoch: 4850 avg_cer= 0.019230770\n",
+ "Epoch: 4851 avg_cer= 0.019230770\n",
+ "Epoch: 4852 avg_cer= 0.019230770\n",
+ "Epoch: 4853 avg_cer= 0.019230770\n",
+ "Epoch: 4854 avg_cer= 0.019230770\n",
+ "Epoch: 4855 avg_cer= 0.019230770\n",
+ "Epoch: 4856 avg_cer= 0.019230770\n",
+ "Epoch: 4857 avg_cer= 0.019230770\n",
+ "Epoch: 4858 avg_cer= 0.019230770\n",
+ "Epoch: 4859 avg_cer= 0.019230770\n",
+ "Epoch: 4860 avg_cer= 0.019230770\n",
+ "Epoch: 4861 avg_cer= 0.019230770\n",
+ "Epoch: 4862 avg_cer= 0.019230770\n",
+ "Epoch: 4863 avg_cer= 0.019230770\n",
+ "Epoch: 4864 avg_cer= 0.019230770\n",
+ "Epoch: 4865 avg_cer= 0.019230770\n",
+ "Epoch: 4866 avg_cer= 0.019230770\n",
+ "Epoch: 4867 avg_cer= 0.019230770\n",
+ "Epoch: 4868 avg_cer= 0.019230770\n",
+ "Epoch: 4869 avg_cer= 0.019230770\n",
+ "Epoch: 4870 avg_cer= 0.019230770\n",
+ "Epoch: 4871 avg_cer= 0.019230770\n",
+ "Epoch: 4872 avg_cer= 0.019230770\n",
+ "Epoch: 4873 avg_cer= 0.019230770\n",
+ "Epoch: 4874 avg_cer= 0.019230770\n",
+ "Epoch: 4875 avg_cer= 0.019230770\n",
+ "Epoch: 4876 avg_cer= 0.019230770\n",
+ "Epoch: 4877 avg_cer= 0.019230770\n",
+ "Epoch: 4878 avg_cer= 0.019230770\n",
+ "Epoch: 4879 avg_cer= 0.019230770\n",
+ "Epoch: 4880 avg_cer= 0.019230770\n",
+ "Epoch: 4881 avg_cer= 0.019230770\n",
+ "Epoch: 4882 avg_cer= 0.019230770\n",
+ "Epoch: 4883 avg_cer= 0.019230770\n",
+ "Epoch: 4884 avg_cer= 0.019230770\n",
+ "Epoch: 4885 avg_cer= 0.019230770\n",
+ "Epoch: 4886 avg_cer= 0.019230770\n",
+ "Epoch: 4887 avg_cer= 0.019230770\n",
+ "Epoch: 4888 avg_cer= 0.019230770\n",
+ "Epoch: 4889 avg_cer= 0.019230770\n",
+ "Epoch: 4890 avg_cer= 0.019230770\n",
+ "Epoch: 4891 avg_cer= 0.019230770\n",
+ "Epoch: 4892 avg_cer= 0.019230770\n",
+ "Epoch: 4893 avg_cer= 0.019230770\n",
+ "Epoch: 4894 avg_cer= 0.019230770\n",
+ "Epoch: 4895 avg_cer= 0.019230770\n",
+ "Epoch: 4896 avg_cer= 0.019230770\n",
+ "Epoch: 4897 avg_cer= 0.019230770\n",
+ "Epoch: 4898 avg_cer= 0.019230770\n",
+ "Epoch: 4899 avg_cer= 0.019230770\n",
+ "Epoch: 4900 avg_cer= 0.019230770\n",
+ "Epoch: 4901 avg_cer= 0.019230770\n",
+ "Epoch: 4902 avg_cer= 0.019230770\n",
+ "Epoch: 4903 avg_cer= 0.019230770\n",
+ "Epoch: 4904 avg_cer= 0.019230770\n",
+ "Epoch: 4905 avg_cer= 0.019230770\n",
+ "Epoch: 4906 avg_cer= 0.019230770\n",
+ "Epoch: 4907 avg_cer= 0.019230770\n",
+ "Epoch: 4908 avg_cer= 0.019230770\n",
+ "Epoch: 4909 avg_cer= 0.019230770\n",
+ "Epoch: 4910 avg_cer= 0.019230770\n",
+ "Epoch: 4911 avg_cer= 0.019230770\n",
+ "Epoch: 4912 avg_cer= 0.019230770\n",
+ "Epoch: 4913 avg_cer= 0.019230770\n",
+ "Epoch: 4914 avg_cer= 0.019230770\n",
+ "Epoch: 4915 avg_cer= 0.019230770\n",
+ "Epoch: 4916 avg_cer= 0.019230770\n",
+ "Epoch: 4917 avg_cer= 0.019230770\n",
+ "Epoch: 4918 avg_cer= 0.019230770\n",
+ "Epoch: 4919 avg_cer= 0.019230770\n",
+ "Epoch: 4920 avg_cer= 0.019230770\n",
+ "Epoch: 4921 avg_cer= 0.019230770\n",
+ "Epoch: 4922 avg_cer= 0.019230770\n",
+ "Epoch: 4923 avg_cer= 0.019230770\n",
+ "Epoch: 4924 avg_cer= 0.019230770\n",
+ "Epoch: 4925 avg_cer= 0.019230770\n",
+ "Epoch: 4926 avg_cer= 0.019230770\n",
+ "Epoch: 4927 avg_cer= 0.019230770\n",
+ "Epoch: 4928 avg_cer= 0.019230770\n",
+ "Epoch: 4929 avg_cer= 0.019230770\n",
+ "Epoch: 4930 avg_cer= 0.019230770\n",
+ "Epoch: 4931 avg_cer= 0.019230770\n",
+ "Epoch: 4932 avg_cer= 0.019230770\n",
+ "Epoch: 4933 avg_cer= 0.019230770\n",
+ "Epoch: 4934 avg_cer= 0.019230770\n",
+ "Epoch: 4935 avg_cer= 0.019230770\n",
+ "Epoch: 4936 avg_cer= 0.019230770\n",
+ "Epoch: 4937 avg_cer= 0.019230770\n",
+ "Epoch: 4938 avg_cer= 0.019230770\n",
+ "Epoch: 4939 avg_cer= 0.019230770\n",
+ "Epoch: 4940 avg_cer= 0.019230770\n",
+ "Epoch: 4941 avg_cer= 0.019230770\n",
+ "Epoch: 4942 avg_cer= 0.019230770\n",
+ "Epoch: 4943 avg_cer= 0.019230770\n",
+ "Epoch: 4944 avg_cer= 0.019230770\n",
+ "Epoch: 4945 avg_cer= 0.019230770\n",
+ "Epoch: 4946 avg_cer= 0.019230770\n",
+ "Epoch: 4947 avg_cer= 0.019230770\n",
+ "Epoch: 4948 avg_cer= 0.019230770\n",
+ "Epoch: 4949 avg_cer= 0.019230770\n",
+ "Epoch: 4950 avg_cer= 0.019230770\n",
+ "Epoch: 4951 avg_cer= 0.019230770\n",
+ "Epoch: 4952 avg_cer= 0.019230770\n",
+ "Epoch: 4953 avg_cer= 0.019230770\n",
+ "Epoch: 4954 avg_cer= 0.019230770\n",
+ "Epoch: 4955 avg_cer= 0.019230770\n",
+ "Epoch: 4956 avg_cer= 0.019230770\n",
+ "Epoch: 4957 avg_cer= 0.019230770\n",
+ "Epoch: 4958 avg_cer= 0.019230770\n",
+ "Epoch: 4959 avg_cer= 0.019230770\n",
+ "Epoch: 4960 avg_cer= 0.019230770\n",
+ "Epoch: 4961 avg_cer= 0.019230770\n",
+ "Epoch: 4962 avg_cer= 0.019230770\n",
+ "Epoch: 4963 avg_cer= 0.019230770\n",
+ "Epoch: 4964 avg_cer= 0.019230770\n",
+ "Epoch: 4965 avg_cer= 0.019230770\n",
+ "Epoch: 4966 avg_cer= 0.019230770\n",
+ "Epoch: 4967 avg_cer= 0.019230770\n",
+ "Epoch: 4968 avg_cer= 0.019230770\n",
+ "Epoch: 4969 avg_cer= 0.019230770\n",
+ "Epoch: 4970 avg_cer= 0.019230770\n",
+ "Epoch: 4971 avg_cer= 0.019230770\n",
+ "Epoch: 4972 avg_cer= 0.019230770\n",
+ "Epoch: 4973 avg_cer= 0.019230770\n",
+ "Epoch: 4974 avg_cer= 0.019230770\n",
+ "Epoch: 4975 avg_cer= 0.019230770\n",
+ "Epoch: 4976 avg_cer= 0.019230770\n",
+ "Epoch: 4977 avg_cer= 0.019230770\n",
+ "Epoch: 4978 avg_cer= 0.019230770\n",
+ "Epoch: 4979 avg_cer= 0.019230770\n",
+ "Epoch: 4980 avg_cer= 0.019230770\n",
+ "Epoch: 4981 avg_cer= 0.019230770\n",
+ "Epoch: 4982 avg_cer= 0.019230770\n",
+ "Epoch: 4983 avg_cer= 0.019230770\n",
+ "Epoch: 4984 avg_cer= 0.019230770\n",
+ "Epoch: 4985 avg_cer= 0.019230770\n",
+ "Epoch: 4986 avg_cer= 0.019230770\n",
+ "Epoch: 4987 avg_cer= 0.019230770\n",
+ "Epoch: 4988 avg_cer= 0.019230770\n",
+ "Epoch: 4989 avg_cer= 0.019230770\n",
+ "Epoch: 4990 avg_cer= 0.019230770\n",
+ "Epoch: 4991 avg_cer= 0.019230770\n",
+ "Epoch: 4992 avg_cer= 0.019230770\n",
+ "Epoch: 4993 avg_cer= 0.019230770\n",
+ "Epoch: 4994 avg_cer= 0.019230770\n",
+ "Epoch: 4995 avg_cer= 0.019230770\n",
+ "Epoch: 4996 avg_cer= 0.019230770\n",
+ "Epoch: 4997 avg_cer= 0.019230770\n",
+ "Epoch: 4998 avg_cer= 0.019230770\n",
+ "Epoch: 4999 avg_cer= 0.019230770\n",
+ "Epoch: 5000 avg_cer= 0.019230770\n",
+ "Optimization Finished!\n",
+ "Decoded:\n",
+ "she had your dark suit in greasy wash water al year\n"
+ ]
+ }
+ ],
"source": [
- "where"
+ "with tf.Session() as session:\n",
+ " # Initialize all variables\n",
+ " tf.initialize_all_variables().run()\n",
+ " \n",
+ " # Loop over the data set for training_epochs epochs\n",
+ "    # Loop over the data set for training_iters epochs\n",
+ " # Define total_loss\n",
+ " total_loss = 0\n",
+ " \n",
+ " # Define character error rate\n",
+ " train_cer = 0\n",
+ " \n",
+ " # Determine the total number of batches\n",
+ " total_batch = int(ted_lium.train.num_examples/batch_size)\n",
+ " \n",
+ " # Loop over the batches\n",
+ " for batch in range(total_batch):\n",
+ " # Obtain the next batch of data\n",
+ " batch_x, batch_y, batch_seq_len = ted_lium.train.next_batch(batch_size)\n",
+ " \n",
+ " # Create a map to fill the placeholders with batch data\n",
+ " feed = {x: batch_x,\n",
+ " y: batch_y,\n",
+ " seq_len: batch_seq_len,\n",
+ " keep_prob: (1 - dropout_rate)}\n",
+ " \n",
+ " # Train on the current batch\n",
+ " batch_avg_loss, _ = session.run([avg_loss, optimizer], feed)\n",
+ " train_cer += session.run(acc, feed_dict=feed)\n",
+ " \n",
+ " # Add batch_avg_loss to total_loss\n",
+ " total_loss += batch_avg_loss\n",
+ " \n",
+ " if epoch % display_step == 0:\n",
+ " print \"Epoch:\", '%04d' % (epoch+1), \"avg_cer=\", \"{:.9f}\".format((train_cer / total_batch))\n",
+ "\n",
+ " # Indicate optimization has concluded\n",
+ " print \"Optimization Finished!\"\n",
+ " \n",
+ " # Decoding\n",
+ " d = session.run(decoded[0], feed_dict=feed)\n",
+ " str_decoded = ''.join([chr(xt) for xt in np.asarray(d[1]) + (ord('a') - 1 )])\n",
+ "    # Strip the blank label from the decoded string\n",
+ " str_decoded = str_decoded.replace(chr(ord('z') + 1), '')\n",
+ "    # Map the space label back to a space character\n",
+ " str_decoded = str_decoded.replace(chr(ord('a') - 1), ' ')\n",
+ " print('Decoded:\\n%s' % str_decoded)"
]
},
{
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\pi^* \\equiv \\underset{\\pi}{\\operatorname{argmax}} \\Pr(\\pi \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Best path decoding is trivial to compute, since $\\pi^∗$ is just the concatenation of the most active outputs at every timestep. However it can lead to errors, particularly if a label is weakly predicted for several consecutive timesteps."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Prefix Search Decoding"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The second method (prefix search decoding) relies on the fact that, by modifying the forward variables $\\alpha(t,u)$, we can efficiently calculate the probabilities of successive extensions of labelling prefixes."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Prefix search decoding is a best-first search through the tree of labellings, where the children of a given labelling are those that share it as a prefix. At each step the search extends the labelling whose children have the largest cumulative probability. This is illustrated in the following diagram"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Here each node either ends (‘e’) or extends the prefix at its parent node. The number above an extending node is the total probability of all labellings beginning with that prefix. The number above an end node is the probability of the labelling ending at its parent. At every iteration the extensions of the most probable remaining prefix are explored. Search ends when a single labelling (here ‘XY’) is more probable than any remaining prefix."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Let $\\gamma(p_n,t)$ be the probability of the network outputting prefix $p$ by time $t$ such that a non-blank label is output at $t$, let $\\gamma(p_b,t)$ be the probability of the network outputting prefix $p$ by time $t$ such that the blank label is output at $t$, and let the set $Y = \\{ \\pi \\in A′^t : \\mathcal{B}(\\pi) = p \\}$. Then"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\n",
- "\\begin{align}\n",
- "\\gamma(p_n,t) &= \\sum_{\\pi \\in Y : \\pi_t = p_{|p|}} \\Pr(\\pi \\, | \\, x) \\\\\n",
- "\\gamma(p_b,t) &= \\sum_{\\pi \\in Y : \\pi_t = blank} \\Pr(\\pi \\, | \\, x)\n",
- "\\end{align}\n",
- "$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Thus, for a length $T$ input sequence $x$, $\\Pr(p \\, | \\, x) = \\gamma(p_n, T) + \\gamma(p_b, T)$. Also let $\\Pr(p . . . \\, | \\, x)$ be the cumulative probability of all labellings not equal to $p$ of which $p$ is a prefix, then"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr(p . . . \\, | \\, x) = \\sum_{l \\ne \\emptyset} \\Pr(p + l \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where $\\emptyset$ denotes the empty sequence. With these definitions is mind, the pseudocode for prefix search decoding is given as follows:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Given enough time, prefix search decoding always finds the most probable labelling. However, the maximum number of prefixes it must expand grows exponentially with the input sequence length. If the output distribution is sufficiently peaked around the mode, it will still finish in reasonable time. But for many tasks, a heuristic is required to make its application feasible."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Observing that the outputs of a trained CTC network tend to form a \"series of spikes separated by strongly predicted blanks\", we can divide the output sequence into sections that are very likely to begin and end with a blank. We do this by choosing boundary points where the probability of observing a blank label is above a certain threshold. We then apply the prefix search decoding algorithm to each section individually and concatenate these to get the final transcription."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "In practice, prefix search works well with this heuristic, and generally outperforms best path decoding. However it still makes mistakes in some cases, for example if the same label is predicted weakly on both sides of a section boundary."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Constrained Decoding"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "For certain tasks we want to constrain the output labellings according to some predefined grammar. For example, in speech and handwriting recognition, the final transcriptions are usually required to form sequences of dictionary words. In addition it is common practice to use a language model to weight the probabilities of particular sequences of words."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "We can express these constraints by altering the label sequence probabilities in"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$l^* \\equiv \\underset{l}{\\operatorname{argmax}} \\Pr(l \\, | \\, x)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "to be conditioned on some probabilistic grammar $G$, as well as the input sequence $x$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$l^* \\equiv \\underset{l}{\\operatorname{argmax}} \\Pr(l \\, | \\, x, G)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Absolute requirements, for example that $l$ contains only dictionary words, can be incorporated by setting the probability of all sequences that fail to meet them to $0$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "At first sight, conditioning on $G$ would seem to contradict a basic assumption of CTC: that the labels are conditionally independent given the input sequences. Since the network attempts to model the probability of the whole labelling at once, there is nothing to stop it from learning inter-label transitions direct from the data, which would then be skewed by the external grammar. Indeed, when we tried using a biphone model to decode a CTC network trained for phoneme recognition, the error rate increased. However, CTC networks are typically only able to learn local relationships such as commonly occurring pairs or triples of labels. Therefore as long as $G$ focuses on long range label dependencies (such as the probability of one word following another when the outputs are letters) it doesn’t interfere with the dependencies modelled internally by CTC."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Applying the basic rules of probability we obtain"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr(l \\, | \\, x, G) = \\frac{\\Pr(l \\, | \\, x) \\Pr(l \\, | \\, G) \\Pr(x)}{\\Pr(x \\, | \\, G) \\Pr(l)}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "where we have used the fact that $x$ is conditionally independent of $G$ given $l$. If we assume that $x$ is independent of $G$, this reduces to"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$\\Pr(l \\, | \\, x, G) = \\frac{\\Pr(l \\, | \\, x) \\Pr(l \\, | \\, G)}{\\Pr(l)}$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "This assumption is in general false, since both the input sequences and the grammar depend on the underlying generator of the data, for example the language being spoken. However it is a reasonable first approximation, and is particularly justifiable in cases where the grammar is created using data other than that from which $x$ was drawn (as is common practice in speech and handwriting recognition, where separate textual corpora are used to generate language models)."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "If we further assume that, prior to any knowledge about the input or the grammar, all label sequences are equally probable, then"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$l^* \\equiv \\underset{l}{\\operatorname{argmax}} \\Pr(l \\, | \\, x, G)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- " reduces to"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "$$l^* \\equiv \\underset{l}{\\operatorname{argmax}} \\Pr(l \\, | \\, x) \\Pr(l \\, | \\, G)$$"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Note that, since the number of possible label sequences is finite (because both $A$ and $S$ are finite), assigning equal prior probabilities does not lead to an improper prior."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### CTC Token Passing Algorithm"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "We now describe an algorithm, based on the *token passing algorithm* for HMMs[[7]](ftp://mi.eng.cam.ac.uk/pub/reports/auto-pdf/young_tr38.pdf), that finds an approximate solution to the previous equation for a simple grammar."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Let $G$ consist of a dictionary $D$ containing $W$ words, and an optional set of $W^2$ bigrams $\\Pr(w \\, | \\, \\hat{w})$ that define the probability of making a transition from word $\\hat{w}$ to word $w$. The probability of any label sequence that does not form a sequence of dictionary words is $0$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "For each word $w$, define the modified word $w′$ as $w$ with blanks added at the beginning and end and between each pair of labels. Therefore $|w′| = 2|w| + 1$. Define a token $tok = (score,history)$ to be a pair consisting of a real valued ‘score’ and a ‘history’ of previously visited words. The history corresponds to the path through the network outputs the token has taken so far, and the score is the log probability of that path. The basic idea of the token passing algorithm is to pass along the highest scoring tokens at every word state, then maximise over these to find the highest scoring tokens at the next state. The transition probabilities are used when a token is passed from the last state in one word to the first state in another. The output word sequence is then given by the history of the highest scoring end-of-word token at the final timestep."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "At every timestep $t$ of the length $T$ output sequence, each segment $s$ of each modified word $w′$ holds a single token $tok(w,s,t)$. This is the highest scoring token reaching that segment at that time. Define the input token $tok(w,0,t)$ to be the highest scoring token arriving at word $w$ at time $t$, and the output token $tok(w, −1, t)$ to be the highest scoring token leaving word $w$ at time $t$. $\\emptyset$ denotes the empty sequence."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The pseudocode for the algorithm is here"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Computational Complexity"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "If bigrams are used, the CTC token passing algorithm has a worst-case complexity of $\\mathcal{O}(TW^2)$, since line 19 requires a potential search through all $W$ words. However, because the output tokens $tok(w,−1,T)$ are sorted in order of score, the search can be terminated when a token is reached whose score is less than the current best score with the transition included. The typical complexity is therefore considerably lower, with a lower bound of $\\mathcal{O}(TW \\log W )$ to account for the sort."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "If no bigrams are used, the single most probable output token at the previous timestep will form the new input token for all the words, and the worst-case complexity reduces to $\\mathcal{O}(TW)$."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Single Word Decoding"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "If the number of words in the target sequence is fixed, the previous algorithm can be constrained by forbidding all tokens whose history already contains that many words from transitioning to new words. In particular, if the target sequences are constrained to be single words, then all word-to-word transitions are forbidden (and bigrams are clearly not required)."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "In general the extension from finding the single best transcription to the $N$-best transcriptions is complex. However, in the special case of single word decoding, the $N$-best transcriptions are simply the (single word) histories of the $N$-best output tokens when the algorithm terminates."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Another straightforward extension to single word decoding occurs when the same word has several different label transcriptions. This happens, for example, when pronunciation variants are considered in speech recognition, or spelling variants are allowed in handwriting recognition. In that case all variants should be considered separate words until the termination of the previous algorithm (lines 34 and 34); at that point the scores of all variant transcriptions of each word should be added together in the log scale; thereafter the best or $N$-best words should be found as usual."
- ]
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
}
],
"metadata": {
diff --git a/data/smoke_test/LDC93S1.txt b/data/smoke_test/LDC93S1.txt
new file mode 100644
index 00000000..2ef58538
--- /dev/null
+++ b/data/smoke_test/LDC93S1.txt
@@ -0,0 +1 @@
+0 46797 She had your dark suit in greasy wash water all year.
diff --git a/data/smoke_test/LDC93S1.wav b/data/smoke_test/LDC93S1.wav
new file mode 100644
index 00000000..62b65f8f
Binary files /dev/null and b/data/smoke_test/LDC93S1.wav differ
diff --git a/images/Figure.png b/images/Figure.png
deleted file mode 100644
index d9de2623..00000000
Binary files a/images/Figure.png and /dev/null differ
diff --git a/images/Lattice.png b/images/Lattice.png
deleted file mode 100644
index bdc78331..00000000
Binary files a/images/Lattice.png and /dev/null differ
diff --git a/images/PrefixDecoding.png b/images/PrefixDecoding.png
deleted file mode 100644
index f227f76a..00000000
Binary files a/images/PrefixDecoding.png and /dev/null differ
diff --git a/images/PrefixSearchDecoding.png b/images/PrefixSearchDecoding.png
deleted file mode 100644
index 985cbb56..00000000
Binary files a/images/PrefixSearchDecoding.png and /dev/null differ
diff --git a/images/TokenPassing.png b/images/TokenPassing.png
deleted file mode 100644
index 5af02905..00000000
Binary files a/images/TokenPassing.png and /dev/null differ
diff --git a/util/__init__.py b/util/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/util/audio/__init__.py b/util/audio/__init__.py
new file mode 100644
index 00000000..d76337ea
--- /dev/null
+++ b/util/audio/__init__.py
@@ -0,0 +1,80 @@
+import numpy as np
+import scipy.io.wavfile as wav
+
+from python_speech_features import mfcc
+
+def audiofiles_to_audio_data_sets(audio_filenames, numcep, numcontext):
+ # Define audio_data_sets to return
+ inputs = []
+ input_seq_lens = []
+
+ # Loop over audio_filenames
+ for audio_filename in audio_filenames:
+ # Load wav files
+ fs, audio = wav.read(audio_filename)
+
+ # Get mfcc coefficients
+ orig_inputs = mfcc(audio, samplerate=fs, numcep=numcep)
+
+ # For each time slice of the training set, we need to copy its context. This turns
+ # the numcep-dimensional vector into a (numcep + 2*numcep*numcontext)-dimensional one:
+ # - numcep dimensions for the current mfcc feature set
+ # - numcontext*numcep dimensions for each of the past and future (x2) mfcc feature sets
+ # => numcep + 2*numcontext*numcep dimensions in total
+ train_inputs = np.array([], np.float)
+ train_inputs.resize((orig_inputs.shape[0], numcep + 2*numcep*numcontext))
+
+ # Prepare the empty prefix/postfix context (TODO: fill empty_mfcc with the MFCC of silence)
+ empty_mfcc = np.array([])
+ empty_mfcc.resize((numcep))
+
+ # Prepare train_inputs with past and future contexts
+ time_slices = range(train_inputs.shape[0])
+ context_past_min = time_slices[0] + numcontext
+ context_future_max = time_slices[-1] - numcontext
+ for time_slice in time_slices:
+ ### Reminder: array[start:stop:step]
+ ### slices from index |start| up to |stop| (not included), every |step|
+ # Pick up to numcontext time slices in the past, and complete with empty
+ # mfcc features
+ need_empty_past = max(0, (context_past_min - time_slice))
+ empty_source_past = list(empty_mfcc for empty_slots in range(need_empty_past))
+ data_source_past = orig_inputs[max(0, time_slice - numcontext):time_slice]
+ assert(len(empty_source_past) + len(data_source_past) == numcontext)
+
+ # Pick up to numcontext time slices in the future, and complete with empty
+ # mfcc features
+ need_empty_future = max(0, (time_slice - context_future_max))
+ empty_source_future = list(empty_mfcc for empty_slots in range(need_empty_future))
+ data_source_future = orig_inputs[time_slice + 1:time_slice + numcontext + 1]
+ assert(len(empty_source_future) + len(data_source_future) == numcontext)
+
+ if need_empty_past:
+ past = np.concatenate((empty_source_past, data_source_past))
+ else:
+ past = data_source_past
+
+ if need_empty_future:
+ future = np.concatenate((data_source_future, empty_source_future))
+ else:
+ future = data_source_future
+
+ past = np.reshape(past, numcontext*numcep)
+ now = orig_inputs[time_slice]
+ future = np.reshape(future, numcontext*numcep)
+
+ train_inputs[time_slice] = np.concatenate((past, now, future))
+ assert(len(train_inputs[time_slice]) == numcep + 2*numcep*numcontext)
+
+ # Whiten inputs (TODO: should we whiten?)
+ train_inputs = (train_inputs - np.mean(train_inputs))/np.std(train_inputs)
+
+ # Obtain array of sequence lengths
+ input_seq_lens.append(train_inputs.shape[0])
+
+ # Convert train_inputs to proper form
+ inputs.append(train_inputs)
+
+ # Return results
+ return (np.asarray(inputs), input_seq_lens)
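+
+# Illustrative usage sketch (not called anywhere in this change); numcep=26 and
+# numcontext=9 below are placeholder values, not prescribed by this module:
+#
+#   inputs, seq_lens = audiofiles_to_audio_data_sets(
+#       ['data/smoke_test/LDC93S1.wav'], numcep=26, numcontext=9)
+#   # inputs[0].shape == (T, 26 + 2*26*9) - one context-widened row per time slice
+#   # seq_lens == [T]                     - one sequence length per audio file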
diff --git a/util/importers/__init__.py b/util/importers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/util/importers/ted_lium/__init__.py b/util/importers/ted_lium/__init__.py
new file mode 100644
index 00000000..e4a406a3
--- /dev/null
+++ b/util/importers/ted_lium/__init__.py
@@ -0,0 +1,86 @@
+import numpy as np
+
+from os import path
+from util.text import text_to_sparse_tuple
+from util.audio import audiofiles_to_audio_data_sets
+
+class DataSets(object):
+ def __init__(self, train, validation, test):
+ self._train = train
+ self._validation = validation
+ self._test = test
+
+ @property
+ def train(self):
+ return self._train
+
+ @property
+ def validation(self):
+ return self._validation
+
+ @property
+ def test(self):
+ return self._test
+
+class DataSet(object):
+ def __init__(self, inputs, outputs, seq_len):
+ self._offset = 0
+ self._inputs = inputs
+ self._outputs = outputs
+ self._seq_len = seq_len
+
+ def next_batch(self, batch_size):
+ next_batch = (self._inputs, self._outputs, self._seq_len) # TODO: Choose only batch_size elements
+ self._offset += batch_size
+ return next_batch
+
+ @property
+ def max_batch_seq_len(self):
+ return np.amax(self._seq_len)
+
+ @property
+ def num_examples(self):
+ return self._inputs.shape[0]
+
+
+def read_data_sets(data_dir, numcep, numcontext):
+ # Get train data
+ train_outputs = read_text_data_sets(data_dir, 'train')
+ train_inputs, train_seq_len = read_audio_data_sets(data_dir, numcep, numcontext, 'train')
+ # Get validation data
+ validation_outputs = read_text_data_sets(data_dir, 'validation')
+ validation_inputs, validation_seq_len = read_audio_data_sets(data_dir, numcep, numcontext, 'validation')
+ # Get test data
+ test_outputs = read_text_data_sets(data_dir, 'test')
+ test_inputs, test_seq_len = read_audio_data_sets(data_dir, numcep, numcontext, 'test')
+
+ # Create train, validation, and test DataSet's
+ train = DataSet(inputs=train_inputs, outputs=train_outputs, seq_len=train_seq_len)
+ validation = DataSet(inputs=validation_inputs, outputs=validation_outputs, seq_len=validation_seq_len)
+ test = DataSet(inputs=test_inputs, outputs=test_outputs, seq_len=test_seq_len)
+
+ # Return DataSets
+ return DataSets(train=train, validation=validation, test=test)
+
+
+def read_text_data_sets(data_dir, data_type):
+ # TODO: Do not ignore data_type = ['train'|'validation'|'test']
+
+ # Create file names
+ text_filename = path.join(data_dir, 'LDC93S1.txt')
+
+ # Read the transcript, drop the leading sample offsets, lowercase it, and strip the period
+ with open(text_filename, 'rb') as f:
+ for line in f.readlines():
+ original = ' '.join(line.strip().lower().split(' ')[2:]).replace('.', '')
+
+ return text_to_sparse_tuple([original])
+
+def read_audio_data_sets(data_dir, numcep, numcontext, data_type):
+ # TODO: Do not ignore data_type = ['train'|'validation'|'test']
+
+ # Create file name
+ audio_filename = path.join(data_dir, 'LDC93S1.wav')
+
+ # Return properly formatted data
+ return audiofiles_to_audio_data_sets([audio_filename], numcep, numcontext)
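+
+# Illustrative usage sketch mirroring how the notebook consumes these data sets;
+# the numcep/numcontext values are placeholders, not defined by this module:
+#
+#   ted_lium = read_data_sets('./data/smoke_test', numcep=26, numcontext=9)
+#   batch_x, batch_y, batch_seq_len = ted_lium.train.next_batch(1)
+#   # batch_y is the sparse (indices, values, shape) tuple for the transcript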
diff --git a/util/text/__init__.py b/util/text/__init__.py
new file mode 100644
index 00000000..149aa535
--- /dev/null
+++ b/util/text/__init__.py
@@ -0,0 +1,50 @@
+import numpy as np
+
+# Constants
+SPACE_TOKEN = ''
+SPACE_INDEX = 0
+FIRST_INDEX = ord('a') - 1 # 0 is reserved for the space token
+
+
+def text_to_sparse_tuple(originals):
+ # Define list to hold results
+ results = []
+
+ # Process each original in originals
+ for original in originals:
+ # Double each space so that split(' ') yields an '' token wherever a space occurred,
+ # giving the sentence's words separated by space tokens
+ result = original.replace(' ', '  ')
+ result = result.split(' ')
+
+ # Tokenize words into letters adding in SPACE_TOKEN where required
+ result = np.hstack([SPACE_TOKEN if xt == '' else list(xt) for xt in result])
+
+ # Map characters into indices
+ result = np.asarray([SPACE_INDEX if xt == SPACE_TOKEN else ord(xt) - FIRST_INDEX for xt in result])
+
+ # Add result to results
+ results.append(result)
+
+ # Creating sparse representation to feed the placeholder
+ return sparse_tuple_from(results)
+
+
+def sparse_tuple_from(sequences, dtype=np.int32):
+ """Create a sparse representation of sequences.
+ Args:
+ sequences: a list of lists of type dtype where each element is a sequence
+ Returns:
+ A tuple with (indices, values, shape)
+ """
+ indices = []
+ values = []
+
+ for n, seq in enumerate(sequences):
+ indices.extend(zip([n]*len(seq), xrange(len(seq))))
+ values.extend(seq)
+
+ indices = np.asarray(indices, dtype=np.int64)
+ values = np.asarray(values, dtype=dtype)
+ shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1]+1], dtype=np.int64)
+
+ return indices, values, shape
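+
+# Illustrative usage sketch with the smoke-test transcript (lowercased, period
+# stripped), matching what read_text_data_sets feeds in:
+#
+#   indices, values, shape = text_to_sparse_tuple(
+#       ['she had your dark suit in greasy wash water all year'])
+#   # shape == [1, 52]: one sequence of 52 labels (letters plus space tokens)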