repo_id
stringlengths 18
103
| file_path
stringlengths 30
136
| content
stringlengths 2
3.36M
| __index_level_0__
int64 0
0
|
---|---|---|---|
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/reverse.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Functions and classes to reverse an FST.
#ifndef FST_REVERSE_H_
#define FST_REVERSE_H_
#include <algorithm>
#include <vector>
#include <fst/cache.h>
namespace fst {
// Reverses an FST. The reversed result is written to an output mutable FST.
// If A transduces string x to y with weight a, then the reverse of A
// transduces the reverse of x to the reverse of y with weight a.Reverse().
//
// Typically, a = a.Reverse() and an arc is its own reverse (e.g., for
// TropicalWeight or LogWeight). In general, e.g., when the weights only form a
// left or right semiring, the output arc type must match the input arc type
// except having the reversed Weight type.
//
// When require_superinitial is false, a superinitial state is not created in
// the reversed FST iff the input FST has exactly one final state (which becomes
// the initial state of the reversed FST) with a final weight of semiring One,
// or if it does not belong to any cycle. When require_superinitial is true, a
// superinitial state is always created.
template <class FromArc, class ToArc>
void Reverse(const Fst<FromArc> &ifst, MutableFst<ToArc> *ofst,
             bool require_superinitial = true) {
  using StateId = typename FromArc::StateId;
  using FromWeight = typename FromArc::Weight;
  using ToWeight = typename ToArc::Weight;
  // Start from a clean output FST that carries the input's symbol tables.
  ofst->DeleteStates();
  ofst->SetInputSymbols(ifst.InputSymbols());
  ofst->SetOutputSymbols(ifst.OutputSymbols());
  // If the input's state count is cheaply known, pre-allocate output states
  // (+1 for a possible superinitial state).
  if (ifst.Properties(kExpanded, false)) {
    ofst->ReserveStates(CountStates(ifst) + 1);
  }
  StateId istart = ifst.Start();
  StateId ostart = kNoStateId;  // Start state of the reversed FST.
  StateId offset = 0;           // 1 iff a superinitial state is created.
  uint64_t dfs_iprops = 0;      // Input properties discovered by the DFS.
  uint64_t dfs_oprops = 0;      // Properties proven to hold for the output.
  if (!require_superinitial) {
    // A superinitial state can only be elided if the input has exactly one
    // final state; search for it. Afterwards, ostart == kNoStateId means
    // zero or more than one final state was found.
    for (StateIterator<Fst<FromArc>> siter(ifst); !siter.Done(); siter.Next()) {
      const auto s = siter.Value();
      if (ifst.Final(s) == FromWeight::Zero()) continue;
      if (ostart != kNoStateId) {
        ostart = kNoStateId;  // Second final state found: superinitial needed.
        break;
      } else {
        ostart = s;
      }
    }
    if (ostart != kNoStateId && ifst.Final(ostart) != FromWeight::One()) {
      // The unique final state has a non-One final weight; it can still serve
      // as the reversed start state provided it lies on no cycle. Check SCC
      // membership first, then self-loops.
      std::vector<StateId> scc;
      SccVisitor<FromArc> scc_visitor(&scc, nullptr, nullptr, &dfs_iprops);
      DfsVisit(ifst, &scc_visitor);
      if (count(scc.begin(), scc.end(), scc[ostart]) > 1) {
        // ostart belongs to a non-trivial SCC, hence to a cycle.
        ostart = kNoStateId;
      } else {
        // A trivial SCC may still carry a self-loop; scan ostart's arcs.
        for (ArcIterator<Fst<FromArc>> aiter(ifst, ostart); !aiter.Done();
             aiter.Next()) {
          if (aiter.Value().nextstate == ostart) {
            ostart = kNoStateId;
            break;
          }
        }
      }
      // If ostart survived both checks, the output start state is acyclic.
      if (ostart != kNoStateId) dfs_oprops = kInitialAcyclic;
    }
  }
  if (ostart == kNoStateId) {  // Super-initial requested or needed.
    ostart = ofst->AddState();
    offset = 1;  // Output state (i + 1) corresponds to input state i.
  }
  for (StateIterator<Fst<FromArc>> siter(ifst); !siter.Done(); siter.Next()) {
    const auto is = siter.Value();
    const auto os = is + offset;
    while (ofst->NumStates() <= os) ofst->AddState();
    // The input start state becomes final in the reversed FST.
    if (is == istart) ofst->SetFinal(os, ToWeight::One());
    const auto weight = ifst.Final(is);
    if ((weight != FromWeight::Zero()) && (offset == 1)) {
      // Superinitial case: connect state 0 to each reversed final state by an
      // epsilon arc carrying the reversed final weight.
      const ToArc oarc(0, 0, weight.Reverse(), os);
      ofst->AddArc(0, oarc);
    }
    for (ArcIterator<Fst<FromArc>> aiter(ifst, is); !aiter.Done();
         aiter.Next()) {
      const auto &iarc = aiter.Value();
      const auto nos = iarc.nextstate + offset;
      auto weight = iarc.weight.Reverse();
      if (!offset && (nos == ostart)) {
        // No superinitial state: fold the reversed final weight of the input's
        // final state into each arc leaving the new start state.
        weight = Times(ifst.Final(ostart).Reverse(), weight);
      }
      // Each input arc is reversed: it now leaves its former target state.
      const ToArc oarc(iarc.ilabel, iarc.olabel, weight, os);
      while (ofst->NumStates() <= nos) ofst->AddState();
      ofst->AddArc(nos, oarc);
    }
  }
  ofst->SetStart(ostart);
  if (offset == 0 && ostart == istart) {
    // Degenerate case: the single state is both initial and final.
    ofst->SetFinal(ostart, ifst.Final(ostart).Reverse());
  }
  // Derive output properties from the input's, plus anything the DFS proved.
  const auto iprops = ifst.Properties(kCopyProperties, false) | dfs_iprops;
  const auto oprops = ofst->Properties(kFstProperties, false) | dfs_oprops;
  ofst->SetProperties(ReverseProperties(iprops, offset == 1) | oprops,
                      kFstProperties);
}
} // namespace fst
#endif // FST_REVERSE_H_
| 0 |
coqui_public_repos/TTS/tests | coqui_public_repos/TTS/tests/vocoder_tests/test_vocoder_wavernn.py | import random
import numpy as np
import torch
from TTS.vocoder.configs import WavernnConfig
from TTS.vocoder.models.wavernn import Wavernn, WavernnArgs
def test_wavernn():
    """Smoke test for the WaveRNN vocoder.

    Builds a small Wavernn model in each of its three output modes
    ("mold", "gauss", and 4-bit quantized), runs a forward pass on random
    input, and asserts the expected output tensor shapes.  Quantized mode
    additionally exercises batched inference.
    """
    config = WavernnConfig()
    config.model_args = WavernnArgs(
        rnn_dims=512,
        fc_dims=512,
        mode="mold",  # mixture-of-logistics output head
        mulaw=False,
        pad=2,
        use_aux_net=True,
        use_upsample_net=True,
        upsample_factors=[4, 8, 8],  # product (256) matches hop_length below
        feat_dims=80,
        compute_dims=128,
        res_out_dims=128,
        num_res_blocks=10,
    )
    config.audio.hop_length = 256
    config.audio.sample_rate = 2048
    # NOTE(review): shapes assumed to be (batch, samples) for dummy_x and
    # (batch, feat_dims, frames) for dummy_m -- confirm against Wavernn.forward.
    dummy_x = torch.rand((2, 1280))
    dummy_m = torch.rand((2, 80, 9))
    y_size = random.randrange(20, 60)
    dummy_y = torch.rand((80, y_size))  # single mel spectrogram for inference
    # mode: mold
    model = Wavernn(config)
    output = model(dummy_x, dummy_m)
    assert np.all(output.shape == (2, 1280, 30)), output.shape
    # mode: gauss
    config.model_args.mode = "gauss"
    model = Wavernn(config)
    output = model(dummy_x, dummy_m)
    assert np.all(output.shape == (2, 1280, 2)), output.shape
    # mode: quantized
    config.model_args.mode = 4
    model = Wavernn(config)
    output = model(dummy_x, dummy_m)
    assert np.all(output.shape == (2, 1280, 2**4)), output.shape
    # Inference upsamples by hop_length (256); padding trims one frame.
    output = model.inference(dummy_y, True, 5500, 550)
    assert np.all(output.shape == (256 * (y_size - 1),))
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/script/fstscript.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// The FST script interface permits users to interact with FSTs without knowing
// their arc type. It does this by mapping compile-time polymorphism (in the
form of an arc-templated FST type) onto a shared virtual interface. It also
// supports arc extension via a DSO interface. Due to the overhead of virtual
// dispatch and registered function lookups, the script API is somewhat slower
than the library API provided by types like StdVectorFst, but has the advantage
// that it is designed not to crash (and to provide useful debugging
// information) upon common user errors like passing invalid indices or
// attempting comparison of incompatible FSTs. It is used both by the FST
// binaries and the Python extension.
//
// This header includes all of the FST script functionality.
#ifndef FST_SCRIPT_FSTSCRIPT_H_
#define FST_SCRIPT_FSTSCRIPT_H_
// Major classes
#include <fst/script/arciterator-class.h>
#include <fst/script/encodemapper-class.h>
#include <fst/script/fst-class.h>
#include <fst/script/stateiterator-class.h>
#include <fst/script/text-io.h>
#include <fst/script/weight-class.h>
// Flag-to-enum parsers.
#include <fst/script/getters.h>
// Templates like Operation<> and Apply<>.
#include <fst/script/script-impl.h>
// Operations.
#include <fst/script/arcsort.h>
#include <fst/script/closure.h>
#include <fst/script/compile.h>
#include <fst/script/compose.h>
#include <fst/script/concat.h>
#include <fst/script/connect.h>
#include <fst/script/convert.h>
#include <fst/script/decode.h>
#include <fst/script/determinize.h>
#include <fst/script/difference.h>
#include <fst/script/disambiguate.h>
#include <fst/script/draw.h>
#include <fst/script/encode.h>
#include <fst/script/epsnormalize.h>
#include <fst/script/equal.h>
#include <fst/script/equivalent.h>
#include <fst/script/info.h>
#include <fst/script/intersect.h>
#include <fst/script/invert.h>
#include <fst/script/isomorphic.h>
#include <fst/script/map.h>
#include <fst/script/minimize.h>
#include <fst/script/print.h>
#include <fst/script/project.h>
#include <fst/script/prune.h>
#include <fst/script/push.h>
#include <fst/script/randequivalent.h>
#include <fst/script/randgen.h>
#include <fst/script/relabel.h>
#include <fst/script/replace.h>
#include <fst/script/reverse.h>
#include <fst/script/reweight.h>
#include <fst/script/rmepsilon.h>
#include <fst/script/shortest-distance.h>
#include <fst/script/shortest-path.h>
#include <fst/script/synchronize.h>
#include <fst/script/topsort.h>
#include <fst/script/union.h>
#include <fst/script/verify.h>
// This class is necessary because registering each of the operations
// separately overfills the stack, as there's so many of them.
namespace fst {
namespace script {
template <class Arc>
class AllFstOperationsRegisterer {
 public:
  // Registers every scripting-API operation for the arc type Arc.
  AllFstOperationsRegisterer() {
    RegisterBatch1();
    RegisterBatch2();
  }

 private:
  // Registration is split across two methods because registering all
  // operations from a single function overfills the stack (see the comment
  // preceding this class).
  void RegisterBatch1() {
    REGISTER_FST_OPERATION(ArcSort, Arc, ArcSortArgs);
    REGISTER_FST_OPERATION(Closure, Arc, ClosureArgs);
    REGISTER_FST_OPERATION(CompileFstInternal, Arc, CompileFstArgs);
    REGISTER_FST_OPERATION(Compose, Arc, ComposeArgs);
    REGISTER_FST_OPERATION(Concat, Arc, ConcatArgs1);
    REGISTER_FST_OPERATION(Concat, Arc, ConcatArgs2);
    REGISTER_FST_OPERATION(Connect, Arc, MutableFstClass);
    REGISTER_FST_OPERATION(Convert, Arc, ConvertArgs);
    REGISTER_FST_OPERATION(Decode, Arc, DecodeArgs1);
    REGISTER_FST_OPERATION(Decode, Arc, DecodeArgs2);
    REGISTER_FST_OPERATION(Determinize, Arc, DeterminizeArgs);
    REGISTER_FST_OPERATION(Difference, Arc, DifferenceArgs);
    REGISTER_FST_OPERATION(Disambiguate, Arc, DisambiguateArgs);
    REGISTER_FST_OPERATION(DrawFst, Arc, FstDrawerArgs);
    REGISTER_FST_OPERATION(Encode, Arc, EncodeArgs1);
    REGISTER_FST_OPERATION(Encode, Arc, EncodeArgs2);
    REGISTER_FST_OPERATION(EpsNormalize, Arc, EpsNormalizeArgs);
    REGISTER_FST_OPERATION(Equal, Arc, EqualArgs);
    REGISTER_FST_OPERATION(Equivalent, Arc, EquivalentArgs);
    REGISTER_FST_OPERATION(PrintFstInfo, Arc, InfoArgs);
    REGISTER_FST_OPERATION(GetFstInfo, Arc, GetInfoArgs);
    REGISTER_FST_OPERATION(InitArcIteratorClass, Arc,
                           InitArcIteratorClassArgs);
    REGISTER_FST_OPERATION(InitEncodeMapperClass, Arc,
                           InitEncodeMapperClassArgs);
    REGISTER_FST_OPERATION(InitMutableArcIteratorClass, Arc,
                           InitMutableArcIteratorClassArgs);
    REGISTER_FST_OPERATION(InitStateIteratorClass, Arc,
                           InitStateIteratorClassArgs);
  }

  void RegisterBatch2() {
    REGISTER_FST_OPERATION(Intersect, Arc, IntersectArgs);
    REGISTER_FST_OPERATION(Invert, Arc, MutableFstClass);
    REGISTER_FST_OPERATION(Map, Arc, MapArgs);
    REGISTER_FST_OPERATION(Minimize, Arc, MinimizeArgs);
    REGISTER_FST_OPERATION(PrintFst, Arc, FstPrinterArgs);
    REGISTER_FST_OPERATION(Project, Arc, ProjectArgs);
    REGISTER_FST_OPERATION(Prune, Arc, PruneArgs1);
    REGISTER_FST_OPERATION(Prune, Arc, PruneArgs2);
    REGISTER_FST_OPERATION(Push, Arc, PushArgs1);
    REGISTER_FST_OPERATION(Push, Arc, PushArgs2);
    REGISTER_FST_OPERATION(RandEquivalent, Arc, RandEquivalentArgs);
    REGISTER_FST_OPERATION(RandGen, Arc, RandGenArgs);
    REGISTER_FST_OPERATION(Relabel, Arc, RelabelArgs1);
    REGISTER_FST_OPERATION(Relabel, Arc, RelabelArgs2);
    REGISTER_FST_OPERATION(Replace, Arc, ReplaceArgs);
    REGISTER_FST_OPERATION(Reverse, Arc, ReverseArgs);
    REGISTER_FST_OPERATION(Reweight, Arc, ReweightArgs);
    REGISTER_FST_OPERATION(RmEpsilon, Arc, RmEpsilonArgs);
    REGISTER_FST_OPERATION(ShortestDistance, Arc, ShortestDistanceArgs1);
    REGISTER_FST_OPERATION(ShortestDistance, Arc, ShortestDistanceArgs2);
    REGISTER_FST_OPERATION(ShortestPath, Arc, ShortestPathArgs);
    REGISTER_FST_OPERATION(Synchronize, Arc, SynchronizeArgs);
    REGISTER_FST_OPERATION(TopSort, Arc, TopSortArgs);
    REGISTER_FST_OPERATION(Union, Arc, UnionArgs);
    REGISTER_FST_OPERATION(Verify, Arc, VerifyArgs);
  }
};
} // namespace script
} // namespace fst
#define REGISTER_FST_OPERATIONS(Arc) \
AllFstOperationsRegisterer<Arc> register_all_fst_operations##Arc;
#endif // FST_SCRIPT_FSTSCRIPT_H_
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/NEWS | OpenFst: Release 1.6
* The `first_path` option to ShortestPath is now optimal for A* (1.6.7)
* Renames SymbolTable::kNoSymbol to kNoSymbol (1.6.7)
* Exposes PowerMapper to the scripting API (1.6.7)
* Fixes linking of the special SOs (1.6.7)
* Fixes error handling in HashMatcher (1.6.6)
* Adds kShortestDelta for operations dependent on shortest-distance (1.6.6)
* Adds Python methods for (un)pickling and (de)serializing FSTs (1.6.6)
* Adds constructive variants of Invert and Project (1.6.6)
* Increases code sharing in MemoryPool/MemoryArena (1.6.6)
* Improves consistency of matcher FST ownership (1.6.6)
* Adds non-trivial A* estimator class (1.6.6)
* Prevents unreachable code generation in libfstscript (1.6.5)
* Adds move constructors for non-trivial weight types (1.6.5)
* Standardizes method names for tuple weight types (1.6.5)
* Eliminates undefined behavior in weight hashing (1.6.5)
* Optimizes binary search in SortedMatcher (1.6.5)
* Adds SetWeight (1.6.5)
* Fixes typing error in Python FAR reader (1.6.4)
* Removes restriction that Prune argument have commutative weights (1.6.3)
* Improves configuration of CompositeWeight readers and writers (1.6.3)
* Improves accuracy of ShortestDistance summation (1.6.3)
* SetFinal now "moves" its weight argument (1.6.3)
* Exposes ArcIterator and EncodeMapper flags in Python (1.6.3)
* Properly sets return codes in FST binaries (1.6.3)
* Eliminates StringWeight macros (1.6.3)
* Finalizes most virtual method overrides (1.6.2)
* Fixes missing includes of <fst/log.h> (1.6.1)
* Adds float format support to FST drawing (1.6.1)
* Extensive modernization for C++11 style (1.6.0)
* Many classes and constants moved into an internal namespace (1.6.0)
* Adds HashMatcher (1.6.0)
* Adds Member method to SymbolTable (1.6.0)
* Adds the "special" extension and the fstspecial binary; this is similar to
fstconvert but accepts arguments for specifying special labels (phi, rho,
and sigma) of FSTs (1.6.0)
* Exposes allow_negative_label option for Python symbol tables (1.6.0)
OpenFst: Release 1.5
* Added p-subsequential determinization (1.5.0)
* Generalized epsilon normalization to non-functional case (1.5.0)
* Added general gallic (plus is union) semiring (1.5.0)
* Added FST compression extension (1.5.0)
* Added Python extension (1.5.0)
* Added multiple pushdown transducer (MPDT) support (1.5.0)
* Fixed Isomorphic function (1.5.0)
* Added final method to matchers (1.5.0)
* Fixed various compiler issues (1.5.0)
* Fixed missing Isomorphic components (1.5.0)
* Added UnionWeight (1.5.0)
* Added InputEpsilonMapper and OutputEpsilonMapper arc mappers (1.5.1)
* Added TrivialComposeFilter for more efficient composition when one
of the arguments is epsilon-free (1.5.1)
* Added properties bits kUnweightedCycles and kWeightedCycles (1.5.1)
* Added missing const qualification to (1.5.1):
- SymbolTableIterator access
- EncodeMapper writing to file
- EncodeMapper SymbolTable access
* Replaced internal custom reference-counting (RefCounter) with
C++11 smart pointers where possible, and fixed associated
reference-counting bugs (1.5.1)
* When calling DeleteStates on a MutableFst with a shared impl, the impl
is set to a new empty impl rather than copying and deleting (1.5.1)
* Prepended `Pdt` to the Expand libraries and classes in the PDT
extension, and prepended `MPdt` to the Expand libraries and classes
in the MPDT extension, so that both can be used in the same compilation
unit (1.5.1)
* Added option to PDT Replace for compiling a strongly-regular RTN into a
bounded-stack PDT (1.5.1)
* Improved symbol table support for PDT Replace, including automatic
generation of parentheses symbols (1.5.1)
* Improvements to scripting API (1.5.1):
- Added methods for FST access and mutation
- Added additional checks for arc/weight compatibility
- WeightClass::One and WeightClass::Zero now require a specified weight
type at time of construction
- Improved VectorFstClass constructors
- Added linear-time check for cyclic dependencies in Replace
- Added EncodeMapperClass, a template-free box for an EncodeMapper
* Improvements to the binaries (1.5.1):
- Fixed no-op --precision flag to fstdraw (1.5.1)
- Fixed no-op --file_list_input flag to farcreate (1.5.1)
* Improvements to the Python extension (1.5.1):
- Added methods for creating an empty mutable FST
- Added methods for FST access via state and arc iteration
- Added FST compilation from arclists (cf. fstcompile)
- Added FST printing and drawing
- Added FarReader and FarWriter classes.
* FarReader's GetFst method now returns a pointer (1.5.2)
* Fixed FSTERROR macro (1.5.2)
* Fixed build flags for dlopen (1.5.2)
* Consolidated Python extension into single module (1.5.2)
* Python add_arc now takes an Arc object (1.5.2)
* Adds optional minimization of non-deterministic FSTs (1.5.3)
* Mutation methods of the Python Fst object now support chaining (1.5.3)
* Scripting API and Python weight objects now support semiring arithmetic
(1.5.3)
* Adds RemoveSymbol method to SymbolTable (1.5.4)
* Prevents underflow when using LogProbArcSelector in random generation
(1.5.4)
* Makes random weight generators a single template class (1.5.4)
* Makes weight Properties constexpr where possible (1.5.4)
* Adds check for error when opening files when compiling strings into FARs
(1.5.4)
* Adds routines for parsing string flags to the scripting API (1.5.4)
OpenFst: Release 1.4
* Port to C++11 (1.4.0)
* Disambiguate function added (1.4.0)
* Isomorphic function added (1.4.0)
* Matcher interface augmented with Priority method.
* Special matchers (rho/sigma/phi) can match special symbols
on both input FSTs in composition/intersection provided at each
state pair they only match one side (1.4.0)
* Added ExplicitMatcher to suppress implicit matches (e.g. epsilon
self-loops) (1.4.0)
* Linear{Tagger,Classifier}Fst extensions added (1.4.0).
* Generalized state-reachable to work when input is cyclic (so long as no
final state is in a cycle). This ensures label-reachable (and hence label
lookahead) works with cyclic input (1.4.0)
* Added Condense to build the condensation graph (SCCs condensed to single
states) of an FST (1.4.0).
* Added an option to Reverse to specify whether a super-initial state
should always be created (1.4.0).
* Fixed bugs in FirstCacheStore, PowerWeight, and StringCompiler (1.4.0).
* Changed SymbolTable to use faster data structure (1.4.0).
* Added 'min' disambiguation in determinization to keep only the minimum
output in a non-functional transducer when plus=min/max
(flag --disambiguate_output) (1.4.1)
* Compiler issues in linear-fst fixed (1.4.1)
OpenFst: Release 1.3
* Support for non-fatal exits on errors: (1.3.1)
- Added FLAGS_fst_error_fatal: FST errors are
fatal if true (default); o.w. return objects flagged as bad:
e.g., FSTs - kError
prop. true, FST weights - not a Member().
- Added kError property bit signifying bad FST
- Added NoWeight() method to FST weight requirements that returns
weight that is not a Member().
* Various improvements to the FAR extensions (1.3.1)
- a single FST is now a FAR type
- FLAGS_initial_symbols: Uses the symbol table from the
first FST in the archive for all entries
- Input/output to standard input/output for some FAR and arc types
* --with-icu configuration option no longer needed (1.3.1)
* Improved flags usage esp. if use SET_FLAGS not SetFlags/InitFst (1.3.2)
* Added 'fst' as possible far writer type (1.3.2)
* phi matcher can now accept 0 as the phi label (1.3.2)
* Added ngram-fst extension (1.3.2)
* Improved performance of PDT composition (1.3.3)
* Memory-map support (1.3.3)
* Fixed cross-FST serialization issues (1.3.3)
* Fixed NGramFst off-by-one issue (1.3.3)
* farextract now allows one to specify a list of comma-separated keys,
including key ranges (1.3.3)
* Fixed bug in PDT replace that could cause close paren IDs to collide
with open paren IDs (1.3.4)
OpenFst: Release 1.2
* Added lookahead matching and filtering for faster composition
* Added EditFst for mutation of o.w. immutable FSTs
* Added script sub-namespace defining type FstClass, a non-templated
Fst<Arc> to hold the arc template type internally. This and FST
operations on it allow easier I/O and scripting at the cost of some
runtime dispatching.
* Added per-arc-iterator control of Fst caching.
* Added PowerWeight and Power Arc.
* Added SparsePowerWeight and SparsePowerArc (1.2.4)
* Added SignedLogWeight and SignedLogArc (1.2.4)
* Added ExpectationWeight and ExpectationArc (1.2.4)
* Added AStarQueue, PruneQueue and NaturalPruneQueue disciplines (1.2.6)
* Added Log64Weight and Log64Arc to FST library throughout, including
support throughout scripts/bins/dsos (1.2.8)
* Added delayed RandGenFst that outputs tree of paths weighted
by count (1.2.8)
* Added fstsymbols shell-level command
* Added total weight removal option to pushing
* Changed methods for symbol table mutation:
use MutableInputSymbols()/MutableOutputSymbols().
* Numerous efficiency improvements esp in composition, replace, and caching
* Made "fstmap" handle semiring conversion by adding "to_std", "to_log"
and "to_log64" as supported 'map_type' arguments (1.2.8).
* Made the destructive implementation of RmEpsilon skip over states
admitting no non-epsilon incoming transition (1.2.8).
* Fixed numerous bugs (1.2 through 1.2.9) including:
- improper types of some approximation deltas
- sub-optimal hashing functions
- issues in internal reuse of shortest distance
- hashing bug in FloatWeight
- bug in shortest path queue
- symbol table checksumming issues
- various C++ standards issues
- Visit() behavior when visitation aborted
- Decode() hash performance bug (1.2.1)
- EditFst::Copy(bool) method when the boolean parameter is true (1.2.7)
- SymbolTable memory leak in Invert() (1.2.8)
- Added escaping of " and \ in labels in fstdraw, needed for dot to
function properly (1.2.8)
- Fixed handling of final weight of start state in fstpush (1.2.8)
- Added FST_LL_FORMAT to fix 64-bit integer printf issues (1.2.9)
- Fixed missing <functional> includes (1.2.9)
- Fixed reused local variable names (1.2.9)
- Fixed passing string by reference in FstDraw args (1.2.9)
* Added extensions directories including:
- finite-state archive (FAR) utilities,
added stlist format supporting writing/reading to/from standard out/in
at the library-level (1.2.8)
- compact fsts
- lookahead fsts
- pushdown transducers (improved in 1.2.1 through 1.2.7).
* Added StateMap/StateMapFst; renamed Map/MapFst to ArcMap/ArcMapFst;
map/MapFst retained (but deprecated) (1.2.9)
* Deleted ArcSum() and ArcMerge; use StateMap w/ ArcSumMapper and
ArcUniqueMapper (1.2.9).
* Incremented version of ConstFst/CompactFsts to stop memory alignment
that fails on pipes. Made old version raises errors when read on
pipes (1.2.9).
* Improved determinize hash (1.2.9)
* Removed stdio uses (1.2.10)
* Fixed library ordering issues esp. with newer GNU build tools (1.2.10)
OpenFst: Release 1.1
* Added compat.h to src/include/fst to fix missing defines
* Fixed bug in acyclic minimization that led to non-minimal
(but equivalent) results
* Fixed missing FST typedef in various matchers in matcher.h
so that they can be cascaded
* Opened file streams binary where appropriate
OpenFst: Release 1.0 (Additions to beta version):
* Matcher class added for matching labels at FST states. Includes
special matchers for sigma (any), rho ('rest'), and phi ('fail')
labels.
* Composition generalized with arbitrary filters, matchers, and state
tables.
* Sequence and matching composition filters provided. (see compose.h,
compose-filter.h, matcher.h, state-table.h)
* Unique n-best (see shortest-path.h)
* Pruning in determinization and epsilon removal (see determinize.h,
rmepsilon.h)
* New Fst class:
* Compact Fsts for space-efficient representation (see compact-fst.h)
* New Weight classes:
* MinMax
* Lexicographic
* Miscellaneous bug fixes
| 0 |
coqui_public_repos/STT/native_client | coqui_public_repos/STT/native_client/dotnet/STT.sln | Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.2.32630.192
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "STTClient", "STTClient\STTClient.csproj", "{56DE4091-BBBE-47E4-852D-7268B33B971F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "STTConsole", "STTConsole\STTConsole.csproj", "{312965E5-C4F6-4D95-BA64-79906B8BC7AC}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "STTConsoleNetCore", "STTConsoleNetCore\STTConsoleNetCore.csproj", "{48369441-9293-4DCF-9794-0E85B148A694}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|x64.ActiveCfg = Debug|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|x64.Build.0 = Debug|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|Any CPU.Build.0 = Release|Any CPU
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|x64.ActiveCfg = Release|x64
{56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|x64.Build.0 = Release|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Debug|Any CPU.ActiveCfg = Debug|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Debug|Any CPU.Build.0 = Debug|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Debug|x64.ActiveCfg = Debug|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Debug|x64.Build.0 = Debug|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Release|Any CPU.ActiveCfg = Release|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Release|Any CPU.Build.0 = Release|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Release|x64.ActiveCfg = Release|x64
{312965E5-C4F6-4D95-BA64-79906B8BC7AC}.Release|x64.Build.0 = Release|x64
{48369441-9293-4DCF-9794-0E85B148A694}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Debug|Any CPU.Build.0 = Debug|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Debug|x64.ActiveCfg = Debug|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Debug|x64.Build.0 = Debug|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Release|Any CPU.ActiveCfg = Release|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Release|Any CPU.Build.0 = Release|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Release|x64.ActiveCfg = Release|Any CPU
{48369441-9293-4DCF-9794-0E85B148A694}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FC035D95-DBFD-4050-885A-A2DD9134B3AD}
EndGlobalSection
EndGlobal
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/native_client/swift.modulemap | module STTNative {
header "coqui-stt.h"
link "stt"
export *
}
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/test-python_39-darwin-amd64-opt.yml | build:
template_file: test-darwin-opt-base.tyml
dependencies:
- "darwin-amd64-cpu-opt"
- "test-training_16k-linux-amd64-py36m-opt"
- "homebrew_tests-darwin-amd64"
test_model_task: "test-training_16k-linux-amd64-py36m-opt"
args:
tests_cmdline: "$TASKCLUSTER_TASK_DIR/DeepSpeech/ds/taskcluster/tc-python-tests.sh 3.9.0: 16k"
metadata:
name: "DeepSpeech OSX AMD64 CPU Python v3.9 tests"
description: "Testing DeepSpeech for OSX/AMD64 on Python v3.9.0:m, CPU only, optimized version"
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/partition.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Functions and classes to create a partition of states.
#ifndef FST_PARTITION_H_
#define FST_PARTITION_H_
#include <algorithm>
#include <vector>
#include <fst/queue.h>
namespace fst {
namespace internal {
template <typename T>
class PartitionIterator;
// Defines a partitioning of elements, used to represent equivalence classes
// for FST operations like minimization. T must be a signed integer type.
//
// The elements are numbered from 0 to num_elements - 1.
// Initialize(num_elements) sets up the class for a given number of elements.
// We maintain a partition of these elements into classes. The classes are also
// numbered from zero; you can add a class with AddClass(), or add them in bulk
// with AllocateClasses(num_classes). Initially the elements are not assigned
// to any class; you set up the initial mapping from elements to classes by
// calling Add(element_id, class_id). You can also move an element to a
// different class by calling Move(element_id, class_id).
//
// We also support a rather specialized interface that allows you to efficiently
// split classes in the Hopcroft minimization algorithm. This maintains a
// binary partition of each class. Let's call these, rather arbitrarily, the
// 'yes' subset and the 'no' subset of each class, and assume that by default,
// each element of a class is in its 'no' subset. When one calls
// SplitOn(element_id), element_id is moved to the 'yes' subset of its class.
// (If it was already in the 'yes' set, it just stays there). The aim is to
// enable (later) splitting the class in two in time no greater than the time
// already spent calling SplitOn() for that class. We keep a list of the classes
// which have nonempty 'yes' sets, as visited_classes_. When one calls
// FinalizeSplit(Queue *l), for each class in visited_classes_ whose 'yes'
// and 'no' sets are both nonempty, it will create a new class consisting of
// the smaller of the two subsets (and this class will be added to the queue),
// and the old class will now be the larger of the two subsets. This call also
// resets all the yes/no partitions so that everything is in the 'no' subsets.
//
// One cannot use the Move() function if SplitOn() has been called without
// a subsequent call to FinalizeSplit()
template <typename T>
class Partition {
public:
  // Constructs an empty partition; Initialize() must be called before use.
  Partition() {}

  // Constructs a partition over num_elements initially classless elements.
  explicit Partition(T num_elements) { Initialize(num_elements); }
// Creates an empty partition for num_elements. This means that the elements
// are not assigned to a class (i.e class_index = -1); you should set up the
// number of classes using AllocateClasses() or AddClass(), and allocate each
// element to a class by calling Add(element, class_id).
  // Resets the partition to num_elements elements, none assigned to a class.
  void Initialize(size_t num_elements) {
    elements_.resize(num_elements);
    classes_.reserve(num_elements);
    classes_.clear();
    // yes_counter_ marks current 'yes'-subset membership (see SplitOn);
    // starting at 1 with all element marks at 0 means no element is 'yes'.
    yes_counter_ = 1;
  }
// Adds a class; returns the id of the newly added class.
  // Appends one new empty class and returns its id (the pre-resize count).
  T AddClass() {
    auto num_classes = classes_.size();
    classes_.resize(num_classes + 1);
    return num_classes;
  }
// Adds 'num_classes' new (empty) classes.
  // Appends num_classes new (empty) classes to the partition.
  void AllocateClasses(T num_classes) {
    classes_.resize(classes_.size() + num_classes);
  }
// Adds element_id to class_id. element_id should already have been allocated
// by calling Initialize(num_elements)---or the constructor taking
// num_elements---with num_elements > element_id. element_id must not
// currently be a member of any class; once elements have been added to a
// class, use the Move() method to move them from one class to another.
  // Assigns element_id (which must currently belong to no class) to class
  // class_id, inserting it at the head of that class's 'no' list.
  void Add(T element_id, T class_id) {
    auto &this_element = elements_[element_id];
    auto &this_class = classes_[class_id];
    ++this_class.size;
    // Adds the element to the 'no' subset of the class (doubly-linked list
    // insertion at the head).
    auto no_head = this_class.no_head;
    if (no_head >= 0) elements_[no_head].prev_element = element_id;
    this_class.no_head = element_id;
    this_element.class_id = class_id;
    // Adds to the 'no' subset of the class.
    this_element.yes = 0;  // 0 != yes_counter_, so not in the 'yes' subset.
    this_element.next_element = no_head;
    this_element.prev_element = -1;  // -1 marks the list head.
  }
// Moves element_id from 'no' subset of its current class to 'no' subset of
// class class_id. This may not work correctly if you have called SplitOn()
// [for any element] and haven't subsequently called FinalizeSplit().
  // Moves element_id from the 'no' subset of its current class to the 'no'
  // subset of class class_id. This may not work correctly if you have called
  // SplitOn() [for any element] and haven't subsequently called
  // FinalizeSplit().
  void Move(T element_id, T class_id) {
    auto elements = &(elements_[0]);
    auto &element = elements[element_id];
    auto &old_class = classes_[element.class_id];
    --old_class.size;
    // Excises the element from the 'no' list of its old class, where it is
    // assumed to be.
    if (element.prev_element >= 0) {
      elements[element.prev_element].next_element = element.next_element;
    } else {
      // The element was the list head.
      old_class.no_head = element.next_element;
    }
    if (element.next_element >= 0) {
      elements[element.next_element].prev_element = element.prev_element;
    }
    // Adds to new class.
    Add(element_id, class_id);
  }
// Moves element_id to the 'yes' subset of its class if it was in the 'no'
// subset, and marks the class as having been visited.
void SplitOn(T element_id) {
auto elements = &(elements_[0]);
auto &element = elements[element_id];
if (element.yes == yes_counter_) {
return; // Already in the 'yes' set; nothing to do.
}
auto class_id = element.class_id;
auto &this_class = classes_[class_id];
// Excises the element from the 'no' list of its class.
if (element.prev_element >= 0) {
elements[element.prev_element].next_element = element.next_element;
} else {
this_class.no_head = element.next_element;
}
if (element.next_element >= 0) {
elements[element.next_element].prev_element = element.prev_element;
}
// Adds the element to the 'yes' list.
if (this_class.yes_head >= 0) {
elements[this_class.yes_head].prev_element = element_id;
} else {
visited_classes_.push_back(class_id);
}
element.yes = yes_counter_;
element.next_element = this_class.yes_head;
element.prev_element = -1;
this_class.yes_head = element_id;
this_class.yes_size++;
}
// This should be called after one has possibly called SplitOn for one or more
// elements, thus moving those elements to the 'yes' subset for their class.
// For each class that has a nontrivial split (i.e., it's not the case that
// all members are in the 'yes' or 'no' subset), this function creates a new
// class containing the smaller of the two subsets of elements, leaving the
// larger group of elements in the old class. The identifier of the new class
// will be added to the queue provided as the pointer L. This method then
// moves all elements to the 'no' subset of their class.
template <class Queue>
void FinalizeSplit(Queue *queue) {
for (const auto &visited_class : visited_classes_) {
const auto new_class = SplitRefine(visited_class);
if (new_class != -1 && queue) queue->Enqueue(new_class);
}
visited_classes_.clear();
// Incrementation sets all the 'yes' members of the elements to false.
++yes_counter_;
}
const T ClassId(T element_id) const { return elements_[element_id].class_id; }
const size_t ClassSize(T class_id) const { return classes_[class_id].size; }
const T NumClasses() const { return classes_.size(); }
private:
friend class PartitionIterator<T>;
// Information about a given element.
struct Element {
T class_id; // Class ID of this element.
T yes; // This is to be interpreted as a bool, true if it's in the
// 'yes' set of this class. The interpretation as bool is
// (yes == yes_counter_ ? true : false).
T next_element; // Next element in the 'no' list or 'yes' list of this
// class, whichever of the two we belong to (think of
// this as the 'next' in a doubly-linked list, although
// it is an index into the elements array). Negative
// values corresponds to null.
T prev_element; // Previous element in the 'no' or 'yes' doubly linked
// list. Negative values corresponds to null.
};
// Information about a given class.
struct Class {
Class() : size(0), yes_size(0), no_head(-1), yes_head(-1) {}
T size; // Total number of elements in this class ('no' plus 'yes'
// subsets).
T yes_size; // Total number of elements of 'yes' subset of this class.
T no_head; // Index of head element of doubly-linked list in 'no' subset.
// Everything is in the 'no' subset until you call SplitOn().
// -1 means no element.
T yes_head; // Index of head element of doubly-linked list in 'yes' subset.
// -1 means no element.
};
// This method, called from FinalizeSplit(), checks whether a class has to
// be split (a class will be split only if its 'yes' and 'no' subsets are
// both nonempty, but one can assume that since this function was called, the
// 'yes' subset is nonempty). It splits by taking the smaller subset and
// making it a new class, and leaving the larger subset of elements in the
// 'no' subset of the old class. It returns the new class if created, or -1
// if none was created.
T SplitRefine(T class_id) {
auto yes_size = classes_[class_id].yes_size;
auto size = classes_[class_id].size;
auto no_size = size - yes_size;
if (no_size == 0) {
// All members are in the 'yes' subset, so we don't have to create a new
// class, just move them all to the 'no' subset.
classes_[class_id].no_head = classes_[class_id].yes_head;
classes_[class_id].yes_head = -1;
classes_[class_id].yes_size = 0;
return -1;
} else {
auto new_class_id = classes_.size();
classes_.resize(classes_.size() + 1);
auto &old_class = classes_[class_id];
auto &new_class = classes_[new_class_id];
// The new_class will have the values from the constructor.
if (no_size < yes_size) {
// Moves the 'no' subset to new class ('no' subset).
new_class.no_head = old_class.no_head;
new_class.size = no_size;
// And makes the 'yes' subset of the old class ('no' subset).
old_class.no_head = old_class.yes_head;
old_class.yes_head = -1;
old_class.size = yes_size;
old_class.yes_size = 0;
} else {
// Moves the 'yes' subset to the new class (to the 'no' subset)
new_class.size = yes_size;
new_class.no_head = old_class.yes_head;
// Retains only the 'no' subset in the old class.
old_class.size = no_size;
old_class.yes_size = 0;
old_class.yes_head = -1;
}
auto elements = &(elements_[0]);
// Updates the 'class_id' of all the elements we moved.
for (auto e = new_class.no_head; e >= 0; e = elements[e].next_element) {
elements[e].class_id = new_class_id;
}
return new_class_id;
}
}
// elements_[i] contains all info about the i'th element.
std::vector<Element> elements_;
// classes_[i] contains all info about the i'th class.
std::vector<Class> classes_;
// Set of visited classes to be used in split refine.
std::vector<T> visited_classes_;
// yes_counter_ is used in interpreting the 'yes' members of class Element.
// If element.yes == yes_counter_, we interpret that element as being in the
// 'yes' subset of its class. This allows us to, in effect, set all those
// bools to false at a stroke by incrementing yes_counter_.
T yes_counter_;
};
// Iterates over members of the 'no' subset of a class in a partition. (When
// this is used, everything is in the 'no' subset).
template <typename T>
class PartitionIterator {
 public:
  using Element = typename Partition<T>::Element;

  // Positions the iterator at the head of class_id's 'no' list.
  PartitionIterator(const Partition<T> &partition, T class_id)
      : partition_(partition),
        element_id_(partition_.classes_[class_id].no_head),
        class_id_(class_id) {}

  // True once the 'no' list is exhausted (negative IDs encode null links).
  // Const-qualified: iteration state is only advanced by Next()/Reset().
  bool Done() const { return element_id_ < 0; }

  // Returns the current element ID.
  T Value() const { return element_id_; }

  // Advances along the doubly-linked 'no' list of the class.
  void Next() { element_id_ = partition_.elements_[element_id_].next_element; }

  // Rewinds to the head of the class's 'no' list.
  void Reset() { element_id_ = partition_.classes_[class_id_].no_head; }

 private:
  const Partition<T> &partition_;
  T element_id_;  // Current element; negative when done.
  T class_id_;    // Class whose 'no' subset is being traversed.
};
} // namespace internal
} // namespace fst
#endif // FST_PARTITION_H_
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/script/isomorphic.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/script/fst-class.h>
#include <fst/script/isomorphic.h>
#include <fst/script/script-impl.h>
namespace fst {
namespace script {
// Scripting-API isomorphism check: verifies the two FSTs share an arc type,
// then dispatches to the arc-templated Isomorphic operation. Returns false
// on an arc-type mismatch (ArcTypesMatch logs the error); otherwise returns
// the dispatched operation's result.
bool Isomorphic(const FstClass &fst1, const FstClass &fst2, float delta) {
  if (!internal::ArcTypesMatch(fst1, fst2, "Isomorphic")) return false;
  IsomorphicInnerArgs inner_args(fst1, fst2, delta);
  IsomorphicArgs args(inner_args);
  Apply<Operation<IsomorphicArgs>>("Isomorphic", fst1.ArcType(), &args);
  return args.retval;
}

REGISTER_FST_OPERATION(Isomorphic, StdArc, IsomorphicArgs);
REGISTER_FST_OPERATION(Isomorphic, LogArc, IsomorphicArgs);
REGISTER_FST_OPERATION(Isomorphic, Log64Arc, IsomorphicArgs);
} // namespace script
} // namespace fst
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/script/script-impl.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// This file defines the registration mechanism for new operations.
// These operations are designed to enable scripts to work with FST classes
// at a high level.
//
// If you have a new arc type and want these operations to work with FSTs
// with that arc type, see below for the registration steps
// you must take.
//
// These methods are only recommended for use in high-level scripting
// applications. Most users should use the lower-level templated versions
// corresponding to these.
//
// If you have a new arc type you'd like these operations to work with,
// use the REGISTER_FST_OPERATIONS macro defined in fstscript.h.
//
// If you have a custom operation you'd like to define, you need four
// components. In the following, assume you want to create a new operation
// with the signature
//
// void Foo(const FstClass &ifst, MutableFstClass *ofst);
//
// You need:
//
// 1) A way to bundle the args that your new Foo operation will take, as
// a single struct. The template structs in arg-packs.h provide a handy
// way to do this. In Foo's case, that might look like this:
//
// using FooArgs = std::pair<const FstClass &, MutableFstClass *>;
//
// Note: this package of args is going to be passed by non-const pointer.
//
// 2) A function template that is able to perform Foo, given the args and
// arc type. Yours might look like this:
//
// template<class Arc>
// void Foo(FooArgs *args) {
// // Pulls out the actual, arc-templated FSTs.
// const Fst<Arc> &ifst = std::get<0>(*args).GetFst<Arc>();
// MutableFst<Arc> *ofst = std::get<1>(*args)->GetMutableFst<Arc>();
// // Actually perform Foo on ifst and ofst.
// }
//
// 3) a client-facing function for your operation. This would look like
// the following:
//
// void Foo(const FstClass &ifst, MutableFstClass *ofst) {
// // Check that the arc types of the FSTs match
// if (!ArcTypesMatch(ifst, *ofst, "Foo")) return;
// // package the args
// FooArgs args(ifst, ofst);
// // Finally, call the operation
// Apply<Operation<FooArgs>>("Foo", ifst->ArcType(), &args);
// }
//
// The Apply<> function template takes care of the link between 2 and 3,
// provided you also have:
//
// 4) A registration for your new operation, on the arc types you care about.
// This can be provided easily by the REGISTER_FST_OPERATION macro in
// operations.h:
//
// REGISTER_FST_OPERATION(Foo, StdArc, FooArgs);
// REGISTER_FST_OPERATION(Foo, MyArc, FooArgs);
// // .. etc
//
//
// That's it! Now when you call Foo(const FstClass &, MutableFstClass *),
// it dispatches (in #3) via the Apply<> function to the correct
// instantiation of the template function in #2.
//
#ifndef FST_SCRIPT_SCRIPT_IMPL_H_
#define FST_SCRIPT_SCRIPT_IMPL_H_
// This file contains general-purpose templates which are used in the
// implementation of the operations.
#include <string>
#include <utility>
#include <fst/generic-register.h>
#include <fst/script/fst-class.h>
#include <fst/log.h>
namespace fst {
namespace script {
// Strategy for randomly selecting arcs (consumed by, e.g., the RandGen
// scripting operation; see getters.h/randgen.h for the mapping from flags).
enum RandArcSelection {
  UNIFORM_ARC_SELECTOR,       // Per the name, uniform over outgoing arcs — confirm in randgen.h.
  LOG_PROB_ARC_SELECTOR,      // Per the name, weighted by (-log) arc probabilities — confirm in randgen.h.
  FAST_LOG_PROB_ARC_SELECTOR  // Per the name, a faster log-prob variant — confirm in randgen.h.
};
// A generic register for operations with various kinds of signatures.
// Needed since every function signature requires a new registration class.
// The std::pair<string, string> is understood to be the operation name and arc
// type; subclasses (or typedefs) need only provide the operation signature.
// A generic register keyed on (operation name, arc type). Inherits the
// thread-safe entry table and dynamic-library fallback from GenericRegister
// (CRTP: the derived type is passed as the third template argument).
template <class OperationSignature>
class GenericOperationRegister
    : public GenericRegister<std::pair<string, string>, OperationSignature,
                             GenericOperationRegister<OperationSignature>> {
 public:
  // Records op under the (operation name, arc type) key.
  void RegisterOperation(const string &operation_name, const string &arc_type,
                         OperationSignature op) {
    this->SetEntry(std::make_pair(operation_name, arc_type), op);
  }

  // Looks up the operation registered for (operation name, arc type);
  // behavior for a missing entry is defined by the GenericRegister base.
  OperationSignature GetOperation(const string &operation_name,
                                  const string &arc_type) {
    return this->GetEntry(std::make_pair(operation_name, arc_type));
  }

 protected:
  // Maps a registry key to the shared object expected to provide it: the
  // arc-type half of the key, sanitized, plus "-arc.so".
  string ConvertKeyToSoFilename(
      const std::pair<string, string> &key) const final {
    // Uses the old-style FST for now.
    string legal_type(key.second);  // The arc type.
    ConvertToLegalCSymbol(&legal_type);
    return legal_type + "-arc.so";
  }
};
// Operation package: everything you need to register a new type of operation.
// The ArgPack should be the type that's passed into each wrapped function;
// for instance, it might be a struct containing all the args. It's always
// passed by pointer, so const members should be used to enforce constness where
// it's needed. Return values should be implemented as a member of ArgPack as
// well.
// Bundles the types needed to register one kind of operation whose wrapped
// functions take an Args pack by pointer (results are returned through
// members of the pack).
template <class Args>
struct Operation {
  using ArgPack = Args;

  // Signature of a wrapped, arc-templated operation.
  using OpType = void (*)(ArgPack *args);

  // The register (hash) type.
  using Register = GenericOperationRegister<OpType>;

  // The register-er type: a static instance of this performs registration.
  using Registerer = GenericRegisterer<Register>;
};
// Macro for registering new types of operations: defines a file-local static
// Registerer whose construction records Op<Arc> under the key (#Op,
// Arc::Type()). The long identifier keeps registrations for distinct
// (Op, Arc, ArgPack) triples from colliding.
#define REGISTER_FST_OPERATION(Op, Arc, ArgPack) \
  static fst::script::Operation<ArgPack>::Registerer \
  arc_dispatched_operation_##ArgPack##Op##Arc##_registerer \
  (std::make_pair(#Op, Arc::Type()), Op<Arc>)
// Template function to apply an operation by name.
// Looks up the operation registered for (op_name, arc_type) and invokes it
// on args. Logs an error (and does nothing else) if no such operation has
// been registered.
template <class OpReg>
void Apply(const string &op_name, const string &arc_type,
           typename OpReg::ArgPack *args) {
  auto *registry = OpReg::Register::GetRegister();
  const auto op = registry->GetOperation(op_name, arc_type);
  if (op) {
    op(args);
  } else {
    FSTERROR() << "No operation found for " << op_name << " on "
               << "arc type " << arc_type;
  }
}
namespace internal {
// Helper that logs to ERROR if the arc types of m and n don't match,
// assuming that both m and n implement .ArcType(). The op_name argument is
// used to construct the error message.
// Returns true iff m and n report the same arc type; on a mismatch logs an
// error mentioning op_name and returns false. M and N need only expose
// ArcType().
template <class M, class N>
bool ArcTypesMatch(const M &m, const N &n, const string &op_name) {
  if (m.ArcType() == n.ArcType()) return true;
  FSTERROR() << "Arguments with non-matching arc types passed to "
             << op_name << ":\t" << m.ArcType() << " and " << n.ArcType();
  return false;
}
// From untyped to typed weights.
template <class Weight>
void CopyWeights(const std::vector<WeightClass> &weights,
std::vector<Weight> *typed_weights) {
typed_weights->clear();
typed_weights->reserve(weights.size());
for (const auto &weight : weights) {
typed_weights->push_back(*weight.GetWeight<Weight>());
}
}
// From typed to untyped weights.
template <class Weight>
void CopyWeights(const std::vector<Weight> &typed_weights,
std::vector<WeightClass> *weights) {
weights->clear();
weights->reserve(typed_weights.size());
for (const auto &typed_weight : typed_weights) {
weights->emplace_back(typed_weight);
}
}
} // namespace internal
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_SCRIPT_IMPL_H_
| 0 |
coqui_public_repos/TTS/tests | coqui_public_repos/TTS/tests/tts_tests/test_vits_d-vectors_train.py | import glob
import os
import shutil
from tests import get_device_id, get_tests_output_path, run_cli
from TTS.tts.configs.vits_config import VitsConfig
# Paths for the generated test config and for the training run outputs.
config_path = os.path.join(get_tests_output_path(), "test_model_config.json")
output_path = os.path.join(get_tests_output_path(), "train_outputs")

# Minimal VITS config: tiny batches, no loader workers, and a single epoch
# so the smoke test finishes quickly.
config = VitsConfig(
    batch_size=2,
    eval_batch_size=2,
    num_loader_workers=0,
    num_eval_loader_workers=0,
    text_cleaner="english_cleaners",
    use_phonemes=True,
    phoneme_language="en-us",
    phoneme_cache_path="tests/data/ljspeech/phoneme_cache/",
    run_eval=True,
    test_delay_epochs=-1,
    epochs=1,
    print_step=1,
    print_eval=True,
    # Each test sentence is paired with the speaker name to synthesize with.
    test_sentences=[
        ["Be a voice, not an echo.", "ljspeech-0"],
    ],
)
# set audio config
config.audio.do_trim_silence = True
config.audio.trim_db = 60
# active multispeaker d-vec mode: speaker identity comes from precomputed
# d-vector embeddings rather than a learned speaker-embedding table.
config.model_args.use_d_vector_file = True
config.model_args.d_vector_file = ["tests/data/ljspeech/speakers.json"]
config.model_args.d_vector_dim = 256
config.save_json(config_path)

# train the model for one epoch
command_train = (
    f"CUDA_VISIBLE_DEVICES='{get_device_id()}' python TTS/bin/train_tts.py --config_path {config_path} "
    f"--coqpit.output_path {output_path} "
    "--coqpit.datasets.0.formatter ljspeech "
    "--coqpit.datasets.0.meta_file_train metadata.csv "
    "--coqpit.datasets.0.meta_file_val metadata.csv "
    "--coqpit.datasets.0.path tests/data/ljspeech "
    "--coqpit.datasets.0.meta_file_attn_mask tests/data/ljspeech/metadata_attn_mask.txt "
    "--coqpit.test_delay_epochs 0"
)
run_cli(command_train)

# Find latest folder (the run directory created by the training above).
continue_path = max(glob.glob(os.path.join(output_path, "*/")), key=os.path.getmtime)

# restore the model and continue training for one more epoch
command_train = f"CUDA_VISIBLE_DEVICES='{get_device_id()}' python TTS/bin/train_tts.py --continue_path {continue_path} "
run_cli(command_train)
# Remove the run directory once the restart also succeeded.
shutil.rmtree(continue_path)
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/tc-scorer-tests.sh | #!/bin/bash
set -xe
source $(dirname "$0")/tc-tests-utils.sh
kenlm_url=$1
maybe_py_or_android="$2"
maybe_android=""
maybe_py=""
if [ ! -z "${maybe_py_or_android}" -a "${maybe_py_or_android}" != "android" ]; then
maybe_py=${maybe_py_or_android}
extract_python_versions "${maybe_py}" "pyver" "pyver_pkg" "py_unicode_type" "pyconf" "pyalias"
fi
if [ "${maybe_py_or_android}" = "android" ]; then
maybe_android="y"
arm_flavor=$3
api_level=$4
fi;
mkdir -p ${TASKCLUSTER_ARTIFACTS} || true
mkdir -p ${TASKCLUSTER_TMP_DIR}/scorer/bins ${TASKCLUSTER_TMP_DIR}/scorer/data || true
generic_download_targz ${TASKCLUSTER_TMP_DIR}/scorer/bins/ "${kenlm_url}"
download_native_client_files ${TASKCLUSTER_TMP_DIR}/scorer/bins/
export PATH=${TASKCLUSTER_TMP_DIR}/scorer/bins/:$PATH
if [ ! -z "${pyalias}" ]; then
maybe_setup_virtualenv_cross_arm "${pyalias}" "deepspeech"
virtualenv_activate "${pyalias}" "deepspeech"
fi
if [ "${maybe_android}" = "y" ]; then
android_start_emulator "${arm_flavor}" "${api_level}"
fi
pushd ${DS_DSDIR}
SCORER_DATA_DIR=${TASKCLUSTER_TMP_DIR}/scorer/data
cp data/smoke_test/vocab.txt ${SCORER_DATA_DIR}
cp data/smoke_test/vocab.txt ${SCORER_DATA_DIR}/vocab-500.txt
cp data/alphabet.txt ${SCORER_DATA_DIR}
gzip ${SCORER_DATA_DIR}/vocab.txt
if [ "${maybe_android}" = "y" ]; then
adb shell mkdir ${ANDROID_TMP_DIR}/ds/ ${ANDROID_TMP_DIR}/scorer/
adb push ${SCORER_DATA_DIR}/ ${ANDROID_TMP_DIR}/scorer/
adb push ${TASKCLUSTER_TMP_DIR}/scorer/bins/* ${ANDROID_TMP_DIR}/ds/
SCORER_DATA_DIR=${ANDROID_TMP_DIR}/scorer/data
fi
if [ ! -z "${maybe_py}" ]; then
pip install --only-binary :all: progressbar2
python data/lm/generate_lm.py \
--input_txt ${SCORER_DATA_DIR}/vocab.txt.gz \
--output_dir ${SCORER_DATA_DIR}/ \
--top_k 500 \
--kenlm_bins ${TASKCLUSTER_TMP_DIR}/scorer/bins/ \
--arpa_order 5 \
--max_arpa_memory "85%" \
--arpa_prune "0|0|1" \
--binary_a_bits 255 \
--binary_q_bits 8 \
--binary_type trie
ls -hal ${SCORER_DATA_DIR}
fi
if [ "${maybe_android}" = "y" ]; then
${DS_BINARY_PREFIX}lmplz \
--memory 64M \
--order 2 \
--discount_fallback \
--text ${SCORER_DATA_DIR}/vocab-500.txt \
--arpa ${SCORER_DATA_DIR}/lm.arpa
${DS_BINARY_PREFIX}build_binary \
-a 255 -q 8 -v trie \
${SCORER_DATA_DIR}/lm.arpa \
${SCORER_DATA_DIR}/lm.binary
fi
${DS_BINARY_PREFIX}generate_scorer_package \
--alphabet ${SCORER_DATA_DIR}/alphabet.txt \
--lm ${SCORER_DATA_DIR}/lm.binary \
--vocab ${SCORER_DATA_DIR}/vocab-500.txt \
--package ${SCORER_DATA_DIR}/kenlm.scorer \
--default_alpha 0.5 \
--default_beta 1.25
if [ "${maybe_android}" = "y" ]; then
adb pull ${SCORER_DATA_DIR}/kenlm.scorer ${TASKCLUSTER_TMP_DIR}/scorer/data/
fi
ls -hal ${TASKCLUSTER_TMP_DIR}/scorer/data/kenlm.scorer
popd
if [ ! -z "${pyalias}" ]; then
virtualenv_deactivate "${pyalias}" "deepspeech"
fi
if [ "${maybe_android}" = "y" ]; then
android_stop_emulator
fi
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/examples-mic_vad_streaming-py36.yml | build:
template_file: examples-base.tyml
docker_image: "python:3.6"
dependencies:
- "linux-amd64-cpu-opt"
system_setup:
>
apt-get -qq -y install portaudio19-dev pulseaudio
args:
tests_cmdline: "${system.homedir.linux}/DeepSpeech/examples/mic_vad_streaming/test.sh 3.6.0:m"
workerType: "${docker.dsTests}"
metadata:
name: "DeepSpeech examples: mic VAD streaming Py3.6"
description: "DeepSpeech examples: mic VAD streaming Python 3.6"
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/bin/fstclosure.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/flags.h>
// Command-line flag: when true, computes T+ (closure without the empty path)
// instead of T*.
DEFINE_bool(closure_plus, false,
            "Do not add the empty path (T+ instead of T*)?");

// Forward declaration; implemented elsewhere (presumably fstclosure-main.cc).
int fstclosure_main(int argc, char **argv);

// Thin entry point that delegates to the real implementation.
int main(int argc, char **argv) { return fstclosure_main(argc, argv); }
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/bin/fstrandgen-main.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Generates random paths through an FST.
#include <cstring>
#include <memory>
#include <string>
#include <fst/flags.h>
#include <fst/log.h>
#include <fst/script/getters.h>
#include <fst/script/randgen.h>
DECLARE_int32(max_length);
DECLARE_int32(npath);
DECLARE_int32(seed);
DECLARE_string(select);
DECLARE_bool(weighted);
DECLARE_bool(remove_total_weight);
// Command-line driver for RandGen: reads an FST from argv[1] (or stdin),
// generates random paths through it per the flags, and writes the result to
// argv[2] (or stdout). Returns 0 on success, 1 on any failure.
int fstrandgen_main(int argc, char **argv) {
  namespace s = fst::script;
  using fst::script::FstClass;
  using fst::script::VectorFstClass;

  string usage = "Generates random paths through an FST.\n\n Usage: ";
  usage += argv[0];
  usage += " [in.fst [out.fst]]\n";

  std::set_new_handler(FailedNewHandler);
  SET_FLAGS(usage.c_str(), &argc, &argv, true);
  if (argc > 3) {
    ShowUsage();
    return 1;
  }

  VLOG(1) << argv[0] << ": Seed = " << FLAGS_seed;

  // "-" and a missing argument both mean standard input/output.
  const string in_name = (argc > 1 && strcmp(argv[1], "-") != 0) ? argv[1] : "";
  const string out_name = argc > 2 ? argv[2] : "";

  std::unique_ptr<FstClass> ifst(FstClass::Read(in_name));
  if (!ifst) return 1;

  VectorFstClass ofst(ifst->ArcType());

  // Translate the --select flag into an arc-selection strategy.
  s::RandArcSelection ras;
  if (!s::GetRandArcSelection(FLAGS_select, &ras)) {
    LOG(ERROR) << argv[0] << ": Unknown or unsupported select type "
               << FLAGS_select;
    return 1;
  }

  s::RandGen(*ifst, &ofst, FLAGS_seed,
             fst::RandGenOptions<s::RandArcSelection>(
                 ras, FLAGS_max_length, FLAGS_npath, FLAGS_weighted,
                 FLAGS_remove_total_weight));

  return !ofst.Write(out_name);
}
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/script/disambiguate.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/script/fst-class.h>
#include <fst/script/disambiguate.h>
#include <fst/script/script-impl.h>
namespace fst {
namespace script {
// Scripting-API disambiguation: checks that the input/output arc types and
// the pruning-threshold weight type agree, then dispatches to the
// arc-templated Disambiguate operation. On a type mismatch the output FST is
// marked with the error property instead. (The && preserves the original
// short-circuit: the weight-type check only runs when the arc types match.)
void Disambiguate(const FstClass &ifst, MutableFstClass *ofst,
                  const DisambiguateOptions &opts) {
  if (internal::ArcTypesMatch(ifst, *ofst, "Disambiguate") &&
      ofst->WeightTypesMatch(opts.weight_threshold, "Disambiguate")) {
    DisambiguateArgs args(ifst, ofst, opts);
    Apply<Operation<DisambiguateArgs>>("Disambiguate", ifst.ArcType(), &args);
  } else {
    ofst->SetProperties(kError, kError);
  }
}

REGISTER_FST_OPERATION(Disambiguate, StdArc, DisambiguateArgs);
REGISTER_FST_OPERATION(Disambiguate, LogArc, DisambiguateArgs);
REGISTER_FST_OPERATION(Disambiguate, Log64Arc, DisambiguateArgs);
} // namespace script
} // namespace fst
| 0 |
coqui_public_repos/inference-engine/third_party | coqui_public_repos/inference-engine/third_party/kenlm/LICENSE | Most of the code here is licensed under the LGPL. There are exceptions that
have their own licenses, listed below. See comments in those files for more
details.
util/getopt.* is getopt for Windows
util/murmur_hash.cc
util/string_piece.hh and util/string_piece.cc
util/double-conversion/LICENSE covers util/double-conversion except the build files
util/file.cc contains a modified implementation of mkstemp under the LGPL
util/integer_to_string.* is BSD
For the rest:
KenLM is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 2.1 of the License, or
(at your option) any later version.
KenLM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License 2.1
along with KenLM code. If not, see <http://www.gnu.org/licenses/lgpl-2.1.html>.
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/visit.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Queue-dependent visitation of finite-state transducers. See also dfs-visit.h.
#ifndef FST_VISIT_H_
#define FST_VISIT_H_
#include <fst/arcfilter.h>
#include <fst/mutable-fst.h>
namespace fst {
// Visitor Interface: class determining actions taken during a visit. If any of
// the boolean member functions return false, the visit is aborted by first
// calling FinishState() on all unfinished (grey) states and then calling
// FinishVisit().
//
// Note this is more general than the visitor interface in dfs-visit.h but lacks
// some DFS-specific behavior.
//
// template <class Arc>
// class Visitor {
// public:
// using StateId = typename Arc::StateId;
//
// Visitor(T *return_data);
//
// // Invoked before visit.
// void InitVisit(const Fst<Arc> &fst);
//
// // Invoked when state discovered (2nd arg is visitation root).
// bool InitState(StateId s, StateId root);
//
// // Invoked when arc to white/undiscovered state examined.
// bool WhiteArc(StateId s, const Arc &arc);
//
// // Invoked when arc to grey/unfinished state examined.
// bool GreyArc(StateId s, const Arc &arc);
//
// // Invoked when arc to black/finished state examined.
// bool BlackArc(StateId s, const Arc &arc);
//
// // Invoked when state finished.
// void FinishState(StateId s);
//
// // Invoked after visit.
// void FinishVisit();
// };
// Performs queue-dependent visitation. Visitor class argument determines
// actions and contains any return data. ArcFilter determines arcs that are
// considered. If 'access_only' is true, performs visitation only to states
// accessible from the initial state.
template <class FST, class Visitor, class Queue, class ArcFilter>
void Visit(const FST &fst, Visitor *visitor, Queue *queue, ArcFilter filter,
           bool access_only = false) {
  using Arc = typename FST::Arc;
  using StateId = typename Arc::StateId;
  visitor->InitVisit(fst);
  const auto start = fst.Start();
  if (start == kNoStateId) {
    // No start state: the visit is trivially complete.
    visitor->FinishVisit();
    return;
  }
  // An FST's state's visit color.
  static constexpr uint8 kWhiteState = 0x01;  // Undiscovered.
  static constexpr uint8 kGreyState = 0x02;   // Discovered & unfinished.
  static constexpr uint8 kBlackState = 0x04;  // Finished.
  // We destroy an iterator as soon as possible and mark it so.
  static constexpr uint8 kArcIterDone = 0x08;
  std::vector<uint8> state_status;
  std::vector<ArcIterator<FST> *> arc_iterator;
  // Arc iterators are pool-allocated: one may be live per enqueued state.
  MemoryPool<ArcIterator<FST>> aiter_pool;
  StateId nstates = start + 1;  // Number of known states in general case.
  bool expanded = false;
  if (fst.Properties(kExpanded, false)) {  // Tests if expanded, then uses
    nstates = CountStates(fst);  // ExpandedFst::NumStates().
    expanded = true;
  }
  state_status.resize(nstates, kWhiteState);
  arc_iterator.resize(nstates);
  StateIterator<Fst<Arc>> siter(fst);
  // Continues visit while true.
  bool visit = true;
  // Iterates over trees in visit forest.
  for (auto root = start; visit && root < nstates;) {
    visit = visitor->InitState(root, root);
    state_status[root] = kGreyState;
    queue->Enqueue(root);
    while (!queue->Empty()) {
      auto state = queue->Head();
      if (state >= state_status.size()) {
        // Grows the bookkeeping arrays for a lazily discovered state.
        nstates = state + 1;
        state_status.resize(nstates, kWhiteState);
        arc_iterator.resize(nstates);
      }
      // Creates arc iterator if needed.
      if (!arc_iterator[state] && !(state_status[state] & kArcIterDone) &&
          visit) {
        arc_iterator[state] = new (&aiter_pool) ArcIterator<FST>(fst, state);
      }
      // Deletes arc iterator if done.
      auto *aiter = arc_iterator[state];
      if ((aiter && aiter->Done()) || !visit) {
        Destroy(aiter, &aiter_pool);
        arc_iterator[state] = nullptr;
        state_status[state] |= kArcIterDone;
      }
      // Dequeues state and marks black if done.
      if (state_status[state] & kArcIterDone) {
        queue->Dequeue();
        visitor->FinishState(state);
        state_status[state] = kBlackState;
        continue;
      }
      const auto &arc = aiter->Value();
      if (arc.nextstate >= state_status.size()) {
        // The arc may point past all states seen so far; grow again.
        nstates = arc.nextstate + 1;
        state_status.resize(nstates, kWhiteState);
        arc_iterator.resize(nstates);
      }
      // Visits respective arc types.
      if (filter(arc)) {
        // Enqueues destination state and marks grey if white.
        if (state_status[arc.nextstate] == kWhiteState) {
          visit = visitor->WhiteArc(state, arc);
          if (!visit) continue;
          visit = visitor->InitState(arc.nextstate, root);
          state_status[arc.nextstate] = kGreyState;
          queue->Enqueue(arc.nextstate);
        } else if (state_status[arc.nextstate] == kBlackState) {
          visit = visitor->BlackArc(state, arc);
        } else {
          visit = visitor->GreyArc(state, arc);
        }
      }
      aiter->Next();
      // Destroys an iterator ASAP for efficiency.
      if (aiter->Done()) {
        Destroy(aiter, &aiter_pool);
        arc_iterator[state] = nullptr;
        state_status[state] |= kArcIterDone;
      }
    }
    if (access_only) break;
    // Finds next tree root.
    for (root = (root == start) ? 0 : root + 1;
         root < nstates && state_status[root] != kWhiteState; ++root) {
    }
    // Check for a state beyond the largest known state.
    if (!expanded && root == nstates) {
      for (; !siter.Done(); siter.Next()) {
        if (siter.Value() == nstates) {
          ++nstates;
          state_status.push_back(kWhiteState);
          arc_iterator.push_back(nullptr);
          break;
        }
      }
    }
  }
  visitor->FinishVisit();
}
// Convenience overload of Visit() that considers all arcs.
template <class Arc, class Visitor, class Queue>
inline void Visit(const Fst<Arc> &fst, Visitor *visitor, Queue *queue) {
  Visit(fst, visitor, queue, AnyArcFilter<Arc>());
}
// Copies input FST to mutable FST following queue order.
template <class A>
class CopyVisitor {
 public:
  using Arc = A;
  using StateId = typename Arc::StateId;

  explicit CopyVisitor(MutableFst<Arc> *ofst) : ifst_(nullptr), ofst_(ofst) {}

  // Clears any previous output and mirrors the input's start state.
  void InitVisit(const Fst<A> &ifst) {
    ifst_ = &ifst;
    ofst_->DeleteStates();
    ofst_->SetStart(ifst_->Start());
  }

  // Ensures the output FST already contains the discovered state.
  bool InitState(StateId state, StateId) {
    while (ofst_->NumStates() <= state) ofst_->AddState();
    return true;
  }

  // Every arc is copied regardless of the destination state's color.
  bool WhiteArc(StateId state, const Arc &arc) { return CopyArc(state, arc); }

  bool GreyArc(StateId state, const Arc &arc) { return CopyArc(state, arc); }

  bool BlackArc(StateId state, const Arc &arc) { return CopyArc(state, arc); }

  // Mirrors the final weight once a state is fully explored.
  void FinishState(StateId state) {
    ofst_->SetFinal(state, ifst_->Final(state));
  }

  void FinishVisit() {}

 private:
  // Appends the arc to the output and signals the visit to continue.
  bool CopyArc(StateId state, const Arc &arc) {
    ofst_->AddArc(state, arc);
    return true;
  }

  const Fst<Arc> *ifst_;
  MutableFst<Arc> *ofst_;
};
// Visits input FST up to a state limit following queue order.
template <class A>
class PartialVisitor {
 public:
  using Arc = A;
  using StateId = typename Arc::StateId;

  explicit PartialVisitor(StateId maxvisit)
      : fst_(nullptr), max_states_(maxvisit) {}

  // Resets the per-visit counters.
  void InitVisit(const Fst<A> &ifst) {
    fst_ = &ifst;
    num_initialized_ = 0;
    num_finished_ = 0;
  }

  // Aborts the visit (by returning false) once more than the configured
  // number of states has been discovered.
  bool InitState(StateId state, StateId root) {
    return ++num_initialized_ <= max_states_;
  }

  // All arc colors are accepted; only state discovery is limited.
  bool WhiteArc(StateId state, const Arc &arc) { return true; }

  bool GreyArc(StateId state, const Arc &arc) { return true; }

  bool BlackArc(StateId state, const Arc &arc) { return true; }

  void FinishState(StateId state) {
    fst_->Final(state);  // Visits super-final arc.
    ++num_finished_;
  }

  void FinishVisit() {}

  StateId NumInitialized() { return num_initialized_; }

  StateId NumFinished() { return num_finished_; }

 private:
  const Fst<Arc> *fst_;
  StateId max_states_;
  StateId num_initialized_;
  StateId num_finished_;
};
// Copies input FST to mutable FST up to a state limit following queue order.
template <class A>
class PartialCopyVisitor : public CopyVisitor<A> {
 public:
  using Arc = A;
  using StateId = typename Arc::StateId;
  using CopyVisitor<A>::WhiteArc;

  // maxvisit: maximum number of states to initialize before the visit is
  // aborted. copy_grey/copy_black control whether arcs to grey (discovered
  // but unfinished) and black (finished) states are copied to the output.
  PartialCopyVisitor(MutableFst<Arc> *ofst, StateId maxvisit,
                     bool copy_grey = true, bool copy_black = true)
      : CopyVisitor<A>(ofst), maxvisit_(maxvisit),
        copy_grey_(copy_grey), copy_black_(copy_black) {}

  void InitVisit(const Fst<A> &ifst) {
    CopyVisitor<A>::InitVisit(ifst);
    ninit_ = 0;
    nfinish_ = 0;
  }

  // Copies the state, then continues the visit only while the number of
  // initialized states does not exceed maxvisit_.
  bool InitState(StateId state, StateId root) {
    CopyVisitor<A>::InitState(state, root);
    ++ninit_;
    return ninit_ <= maxvisit_;
  }

  bool GreyArc(StateId state, const Arc &arc) {
    if (copy_grey_) return CopyVisitor<A>::GreyArc(state, arc);
    return true;
  }

  bool BlackArc(StateId state, const Arc &arc) {
    if (copy_black_) return CopyVisitor<A>::BlackArc(state, arc);
    return true;
  }

  void FinishState(StateId state) {
    CopyVisitor<A>::FinishState(state);
    ++nfinish_;
  }

  void FinishVisit() {}

  // Number of states initialized so far (0 before InitVisit is called).
  StateId NumInitialized() { return ninit_; }
  // Number of states finished so far (0 before InitVisit is called).
  StateId NumFinished() { return nfinish_; }

 private:
  StateId maxvisit_;
  // In-class initializers so the accessors above never read indeterminate
  // values if queried before InitVisit has run.
  StateId ninit_ = 0;
  StateId nfinish_ = 0;
  const bool copy_grey_;
  const bool copy_black_;
};
} // namespace fst
#endif // FST_VISIT_H_
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/test-electronjs_v9.2-darwin-amd64-opt.yml | build:
template_file: test-darwin-opt-base.tyml
dependencies:
- "darwin-amd64-cpu-opt"
- "test-training_16k-linux-amd64-py36m-opt"
- "homebrew_tests-darwin-amd64"
test_model_task: "test-training_16k-linux-amd64-py36m-opt"
system_setup:
>
${nodejs.brew.prep_12}
args:
tests_cmdline: "$TASKCLUSTER_TASK_DIR/DeepSpeech/ds/taskcluster/tc-electron-tests.sh 12.x 9.2.0 16k"
metadata:
name: "DeepSpeech OSX AMD64 CPU ElectronJS v9.2 tests"
description: "Testing DeepSpeech for OSX/AMD64 on ElectronJS v9.2, CPU only, optimized version"
| 0 |
coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core | coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core/framework/kernel_def_builder.h | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include <limits.h>
#include "core/common/common.h"
#include "core/common/optional.h"
#include "core/graph/basic_types.h"
#include "core/framework/data_types.h"
#include "core/framework/allocator.h"
namespace onnxruntime {
class KernelDefBuilder;
typedef std::map<size_t, OrtMemType> MemTypeMap;
class KernelDef {
private:
// note that input/output might be on CPU implicitly when the node is from CPU execution provider
static inline bool MemTypeOnCpuExplicitly(OrtMemType mem_type) {
return mem_type == OrtMemTypeCPUInput || mem_type == OrtMemTypeCPUOutput;
}
public:
explicit KernelDef() = default;
const std::string& OpName() const {
return op_name_;
}
const std::string& Domain() const {
return op_domain_;
}
void SinceVersion(/*out*/ int* start, /*out*/ int* end) const {
*start = op_since_version_start_;
*end = op_since_version_end_;
}
#ifdef onnxruntime_PYBIND_EXPORT_OPSCHEMA
const std::pair<int, int> SinceVersion() const {
return std::pair<int, int>(op_since_version_start_, op_since_version_end_);
}
#endif
onnxruntime::ProviderType Provider() const {
return provider_type_;
}
// type constraints with types supported by default
const std::map<std::string, std::vector<MLDataType>>& TypeConstraints() const {
return default_type_constraints_;
}
// type constraints with types supported in this build
const std::map<std::string, std::vector<MLDataType>>& EnabledTypeConstraints() const {
return enabled_type_constraints_;
}
const std::vector<std::pair<int, int>>& MayInplace() const {
return inplace_map_;
}
const std::vector<std::pair<int, int>>& Alias() const {
return alias_map_;
}
const optional<std::pair<int, int>>& VariadicAlias() const {
return variadic_alias_offsets_;
}
OrtMemType InputMemoryType(size_t input_index) const {
auto it = input_memory_type_args_.find(input_index);
if (it == input_memory_type_args_.end())
return default_inputs_mem_type_;
return it->second;
}
bool IsInputOnCpu(size_t input_index) const { return MemTypeOnCpuExplicitly(InputMemoryType(input_index)); }
bool IsOutputOnCpu(size_t output_index) const { return MemTypeOnCpuExplicitly(OutputMemoryType(output_index)); }
bool AllocateInputsContiguously() const { return allocate_inputs_contiguously_; }
bool HasExternalOutputs() const { return external_outputs_; }
OrtMemType OutputMemoryType(size_t output_index) const {
auto it = output_memory_type_args_.find(output_index);
if (it == output_memory_type_args_.end())
return default_outputs_mem_type_;
return it->second;
}
int ExecQueueId() const {
return exec_queue_id_;
}
bool IsConflict(const KernelDef& other) const;
uint64_t GetHash() const noexcept {
// if we need to support different hash versions we can update CalculateHash to take a version number
// and calculate any non-default versions dynamically. we only use this during kernel lookup so
// it's not performance critical
return hash_;
}
private:
friend class KernelDefBuilder;
// called once by KernelDefBuilder::Build
void CalculateHash();
// The operator name supported by <*this> kernel..
std::string op_name_;
// The operator since_version range supported by <*this> kernel.
// A kernel could support an operator definition between <op_since_version_start>
// and <op_since_version_end> (inclusive).
int op_since_version_start_ = 1;
int op_since_version_end_ = INT_MAX;
// The operator domain supported by <*this> kernel.
// Default to 'onnxruntime::kOnnxDomain'.
// Please note the behavior of std::string("") and std::string() are different
std::string op_domain_;
// The type of the execution provider.
std::string provider_type_;
// The data types that are supported by default for inputs/outputs.
// Key is input/output name defined in op schema, Value are supported types.
// note: std::map as we need the order to be deterministic for the hash
// Note: default_type_constraints_ are used to calculate the kernel hash so that the hash is
// stable across builds with and without kernel type reduction enabled.
std::map<std::string, std::vector<MLDataType>> default_type_constraints_;
// the type constraints that are supported in this build (enabled) for the kernel
std::map<std::string, std::vector<MLDataType>> enabled_type_constraints_;
// optional alternate type constraints to use to calculate the hash instead of default_type_constraints_
// note: this provides a way to update the default type constraints while preserving the hash value
optional<std::map<std::string, std::vector<MLDataType>>> hash_type_constraints_;
// An element <i, j> means that output j reuses the memory of input i.
std::vector<std::pair<int, int>> inplace_map_;
// An element <i, j> means that output j is an alias of input i.
std::vector<std::pair<int, int>> alias_map_;
// This variable stores <input_offset, output_offset> for the variadic alias mapping
// output 'i + output_offset' is an alias of input 'i + input_offset' for all i >= 0
optional<std::pair<int, int>> variadic_alias_offsets_;
// Require input tensors to be allocated contiguously.
bool allocate_inputs_contiguously_ = false;
// Whether the outputs are from external.
bool external_outputs_ = false;
// The memory types of inputs/outputs of this kernel
MemTypeMap input_memory_type_args_;
MemTypeMap output_memory_type_args_;
// execution command queue id, 0 for default queue in execution provider
int exec_queue_id_ = 0;
// Default memory type for all inputs
OrtMemType default_inputs_mem_type_{OrtMemTypeDefault};
// Default memory type for all outputs
OrtMemType default_outputs_mem_type_{OrtMemTypeDefault};
// hash of kernel definition for lookup in minimal build
uint64_t hash_ = 0;
};
class KernelDefBuilder {
public:
static std::unique_ptr<KernelDefBuilder> Create() { return std::make_unique<KernelDefBuilder>(); }
explicit KernelDefBuilder()
: kernel_def_(new KernelDef()) {}
KernelDefBuilder& SetName(const std::string& op_name);
KernelDefBuilder& SetName(const char* op_name);
KernelDefBuilder& SetDomain(const std::string& domain);
KernelDefBuilder& SetDomain(const char* domain);
/**
This kernel supports operator definition since <since_version> (to latest).
*/
KernelDefBuilder& SinceVersion(int since_version) {
kernel_def_->op_since_version_start_ = since_version;
return *this;
}
/**
The start and end version should be set accordingly per version range for
each domain registered in OpSchemaRegistry::DomainToVersionRange in
\onnxruntime\onnxruntime\core\graph\op.h as below.
Key: domain. Value: <lowest version, highest version> pair.
std::unordered_map<std::string, std::pair<int, int>> map_;
*/
KernelDefBuilder& SinceVersion(int since_version_start, int since_version_end) {
kernel_def_->op_since_version_start_ = since_version_start;
kernel_def_->op_since_version_end_ = since_version_end;
return *this;
}
/**
The execution provider type of the kernel.
*/
KernelDefBuilder& Provider(onnxruntime::ProviderType provider_type);
KernelDefBuilder& Provider(const char* provider_type);
/**
Specify the set of types that this kernel supports. A further restriction
of the set of types specified in the op schema.
@param arg_name The arg name can be either op formal parameter name, say "X", or type
argument name specified in op schema, say "T".
@param default_types The types that are supported by default.
@param enabled_types The types that are supported in this build.
Possibly different from default_types when type reduction is enabled.
*/
KernelDefBuilder& TypeConstraint(const std::string& arg_name,
const std::vector<MLDataType>& default_types);
KernelDefBuilder& TypeConstraint(const char* arg_name,
const std::vector<MLDataType>& default_types);
KernelDefBuilder& TypeConstraint(const std::string& arg_name,
const std::vector<MLDataType>& default_types,
const std::vector<MLDataType>& enabled_types);
KernelDefBuilder& TypeConstraint(const char* arg_name,
const std::vector<MLDataType>& default_types,
const std::vector<MLDataType>& enabled_types);
/**
Like TypeConstraint but supports just a single type.
*/
KernelDefBuilder& TypeConstraint(const std::string& arg_name, MLDataType default_type);
KernelDefBuilder& TypeConstraint(const char* arg_name, MLDataType default_type);
/**
Specify the original set of types that this kernel supports by default to use when computing the kernel def hash.
The set of types supported by default may change over time, but the hash should stay the same.
*/
KernelDefBuilder& FixedTypeConstraintForHash(
const std::string& arg_name,
const std::vector<MLDataType>& default_types_for_hash);
KernelDefBuilder& FixedTypeConstraintForHash(
const char* arg_name,
const std::vector<MLDataType>& default_types_for_hash);
/**
Inplace mapping from inputs to outputs allowed.
It means that uplayer runtime could do memory in-place optimization
as it will not impact the correctness of this kernel.
*/
KernelDefBuilder& MayInplace(const std::vector<std::pair<int, int>>& inplaces);
KernelDefBuilder& MayInplace(int input_index, int output_index);
/**
Alias mapping from inputs to outputs. Different from Inplace that the
content of the tensor is not changed. This is to take care of operators
such as Identity and Reshape.
*/
KernelDefBuilder& Alias(const std::vector<std::pair<int, int>>& aliases);
KernelDefBuilder& Alias(int input_index, int output_index);
/**
Apply variadic number of alias mapping from inputs to outputs.
This is effectively applying Alias(i + input_offset, i + output_offset) for i >= 0
*/
KernelDefBuilder& VariadicAlias(int input_offset, int output_offset);
/**
Specify that this kernel requires input tensors to be allocated
contiguously. This allows kernels to execute as a single large
computation, rather than numerous smaller computations.
*/
KernelDefBuilder& AllocateInputsContiguously() {
kernel_def_->allocate_inputs_contiguously_ = true;
return *this;
}
/**
Specify that this kernel's output buffers are passed from external,
i.e. not created or managed by ORT's memory allocator.
*/
KernelDefBuilder& ExternalOutputs() {
kernel_def_->external_outputs_ = true;
return *this;
}
/**
Specify that this kernel requires an input arg
in certain memory type (instead of the default, device memory).
*/
KernelDefBuilder& InputMemoryType(OrtMemType type, int input_index) {
kernel_def_->input_memory_type_args_.insert(std::make_pair(input_index, type));
return *this;
}
/**
Specify that this kernel requires input arguments
in certain memory type (instead of the default, device memory).
*/
KernelDefBuilder& InputMemoryType(OrtMemType type, const std::vector<int>& input_indexes) {
for (auto input_index : input_indexes) {
kernel_def_->input_memory_type_args_.insert(std::make_pair(input_index, type));
}
return *this;
}
/**
Specify that this kernel provides an output arg
in certain memory type (instead of the default, device memory).
*/
KernelDefBuilder& OutputMemoryType(OrtMemType type, int output_index) {
kernel_def_->output_memory_type_args_.insert(std::make_pair(output_index, type));
return *this;
}
/**
Specify that this kernel provides an output arguments
in certain memory type (instead of the default, device memory).
*/
KernelDefBuilder& OutputMemoryType(OrtMemType type, const std::vector<int>& output_indexes) {
for (auto output_index : output_indexes) {
kernel_def_->output_memory_type_args_.insert(std::make_pair(output_index, type));
}
return *this;
}
/**
Specify that this kernel runs on which execution queue in the provider
*/
KernelDefBuilder& ExecQueueId(int queue_id) {
kernel_def_->exec_queue_id_ = queue_id;
return *this;
}
/**
Specify the default inputs memory type, if not specified, it is DefaultMemory
*/
KernelDefBuilder& SetDefaultInputsMemoryType(OrtMemType mem_type) {
kernel_def_->default_inputs_mem_type_ = mem_type;
return *this;
}
/**
Specify the default outputs memory type, if not specified, it is DefaultMemory
*/
KernelDefBuilder& SetDefaultOutputMemoryType(OrtMemType mem_type) {
kernel_def_->default_outputs_mem_type_ = mem_type;
return *this;
}
/**
Return the kernel definition, passing ownership of the KernelDef to the caller
*/
std::unique_ptr<KernelDef> Build() {
kernel_def_->CalculateHash();
return std::move(kernel_def_);
}
private:
KernelDefBuilder& TypeConstraintImpl(const std::string& arg_name,
const std::vector<MLDataType>& default_types,
const std::vector<MLDataType>* enabled_types = nullptr);
// we own the KernelDef until Build() is called.
std::unique_ptr<KernelDef> kernel_def_;
};
} // namespace onnxruntime
| 0 |
coqui_public_repos/TTS/recipes/vctk | coqui_public_repos/TTS/recipes/vctk/tacotron2-DDC/train_tacotron2-ddc.py | import os
from trainer import Trainer, TrainerArgs
from TTS.config.shared_configs import BaseAudioConfig
from TTS.tts.configs.shared_configs import BaseDatasetConfig
from TTS.tts.configs.tacotron2_config import Tacotron2Config
from TTS.tts.datasets import load_tts_samples
from TTS.tts.models.tacotron2 import Tacotron2
from TTS.tts.utils.speakers import SpeakerManager
from TTS.tts.utils.text.tokenizer import TTSTokenizer
from TTS.utils.audio import AudioProcessor
output_path = os.path.dirname(os.path.abspath(__file__))
dataset_config = BaseDatasetConfig(formatter="vctk", meta_file_train="", path=os.path.join(output_path, "../VCTK/"))
audio_config = BaseAudioConfig(
sample_rate=22050,
resample=False, # Resample to 22050 Hz. It slows down training. Use `TTS/bin/resample.py` to pre-resample and set this False for faster training.
do_trim_silence=True,
trim_db=23.0,
signal_norm=False,
mel_fmin=0.0,
mel_fmax=8000,
spec_gain=1.0,
log_func="np.log",
preemphasis=0.0,
)
config = Tacotron2Config( # This is the config that is saved for the future use
audio=audio_config,
batch_size=32,
eval_batch_size=16,
num_loader_workers=4,
num_eval_loader_workers=4,
run_eval=True,
test_delay_epochs=-1,
r=2,
# gradual_training=[[0, 6, 48], [10000, 4, 32], [50000, 3, 32], [100000, 2, 32]],
double_decoder_consistency=True,
epochs=1000,
text_cleaner="phoneme_cleaners",
use_phonemes=True,
phoneme_language="en-us",
phoneme_cache_path=os.path.join(output_path, "phoneme_cache"),
print_step=150,
print_eval=False,
mixed_precision=True,
min_text_len=0,
max_text_len=500,
min_audio_len=0,
max_audio_len=44000 * 10,
output_path=output_path,
datasets=[dataset_config],
use_speaker_embedding=True, # set this to enable multi-sepeaker training
decoder_ssim_alpha=0.0, # disable ssim losses that causes NaN for some runs.
postnet_ssim_alpha=0.0,
postnet_diff_spec_alpha=0.0,
decoder_diff_spec_alpha=0.0,
attention_norm="softmax",
optimizer="Adam",
lr_scheduler=None,
lr=3e-5,
)
# INITIALIZE THE AUDIO PROCESSOR
# Audio processor is used for feature extraction and audio I/O.
# It mainly serves to the dataloader and the training loggers.
ap = AudioProcessor.init_from_config(config)
# INITIALIZE THE TOKENIZER
# Tokenizer is used to convert text to sequences of token IDs.
# If characters are not defined in the config, default characters are passed to the config
tokenizer, config = TTSTokenizer.init_from_config(config)
# LOAD DATA SAMPLES
# Each sample is a list of ```[text, audio_file_path, speaker_name]```
# You can define your custom sample loader returning the list of samples.
# Or define your custom formatter and pass it to the `load_tts_samples`.
# Check `TTS.tts.datasets.load_tts_samples` for more details.
train_samples, eval_samples = load_tts_samples(
dataset_config,
eval_split=True,
eval_split_max_size=config.eval_split_max_size,
eval_split_size=config.eval_split_size,
)
# init speaker manager for multi-speaker training
# it mainly handles speaker-id to speaker-name for the model and the data-loader
speaker_manager = SpeakerManager()
speaker_manager.set_ids_from_data(train_samples + eval_samples, parse_key="speaker_name")
# init model
model = Tacotron2(config, ap, tokenizer, speaker_manager)
# INITIALIZE THE TRAINER
# Trainer provides a generic API to train all the 🐸TTS models with all its perks like mixed-precision training,
# distributed training, etc.
trainer = Trainer(
TrainerArgs(), config, output_path, model=model, train_samples=train_samples, eval_samples=eval_samples
)
# AND... 3,2,1... 🚀
trainer.fit()
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/depcomp | #! /bin/sh
# depcomp - compile a program generating dependencies as side-effects
scriptversion=2016-01-11.22; # UTC
# Copyright (C) 1999-2017 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Originally written by Alexandre Oliva <[email protected]>.
case $1 in
'')
echo "$0: No command. Try '$0 --help' for more information." 1>&2
exit 1;
;;
-h | --h*)
cat <<\EOF
Usage: depcomp [--help] [--version] PROGRAM [ARGS]
Run PROGRAMS ARGS to compile a file, generating dependencies
as side-effects.
Environment variables:
depmode Dependency tracking mode.
source Source file read by 'PROGRAMS ARGS'.
object Object file output by 'PROGRAMS ARGS'.
DEPDIR directory where to store dependencies.
depfile Dependency file to output.
tmpdepfile Temporary file to use when outputting dependencies.
libtool Whether libtool is used (yes/no).
Report bugs to <[email protected]>.
EOF
exit $?
;;
-v | --v*)
echo "depcomp $scriptversion"
exit $?
;;
esac
# Get the directory component of the given path, and save it in the
# global variables '$dir'. Note that this directory component will
# be either empty or ending with a '/' character. This is deliberate.
set_dir_from ()
{
case $1 in
*/*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;;
*) dir=;;
esac
}
# Get the suffix-stripped basename of the given path, and save it the
# global variable '$base'.
set_base_from ()
{
base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'`
}
# If no dependency file was actually created by the compiler invocation,
# we still have to create a dummy depfile, to avoid errors with the
# Makefile "include basename.Plo" scheme.
make_dummy_depfile ()
{
echo "#dummy" > "$depfile"
}
# Factor out some common post-processing of the generated depfile.
# Requires the auxiliary global variable '$tmpdepfile' to be set.
aix_post_process_depfile ()
{
# If the compiler actually managed to produce a dependency file,
# post-process it.
if test -f "$tmpdepfile"; then
# Each line is of the form 'foo.o: dependency.h'.
# Do two passes, one to just change these to
# $object: dependency.h
# and one to simply output
# dependency.h:
# which is needed to avoid the deleted-header problem.
{ sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
} > "$depfile"
rm -f "$tmpdepfile"
else
make_dummy_depfile
fi
}
# A tabulation character.
tab=' '
# A newline character.
nl='
'
# Character ranges might be problematic outside the C locale.
# These definitions help.
upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
lower=abcdefghijklmnopqrstuvwxyz
digits=0123456789
alpha=${upper}${lower}
if test -z "$depmode" || test -z "$source" || test -z "$object"; then
echo "depcomp: Variables source, object and depmode must be set" 1>&2
exit 1
fi
# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
depfile=${depfile-`echo "$object" |
sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
rm -f "$tmpdepfile"
# Avoid interferences from the environment.
gccflag= dashmflag=
# Some modes work just like other modes, but use different flags. We
# parameterize here, but still list the modes in the big case below,
# to make depend.m4 easier to write. Note that we *cannot* use a case
# here, because this file can only contain one case statement.
if test "$depmode" = hp; then
# HP compiler uses -M and no extra arg.
gccflag=-M
depmode=gcc
fi
if test "$depmode" = dashXmstdout; then
# This is just like dashmstdout with a different argument.
dashmflag=-xM
depmode=dashmstdout
fi
cygpath_u="cygpath -u -f -"
if test "$depmode" = msvcmsys; then
# This is just like msvisualcpp but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvisualcpp
fi
if test "$depmode" = msvc7msys; then
# This is just like msvc7 but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvc7
fi
if test "$depmode" = xlc; then
# IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
gccflag=-qmakedep=gcc,-MF
depmode=gcc
fi
case "$depmode" in
gcc3)
## gcc 3 implements dependency tracking that does exactly what
## we want. Yay! Note: for some reason libtool 1.4 doesn't like
## it if -MD -MP comes after the -MF stuff. Hmm.
## Unfortunately, FreeBSD c89 acceptance of flags depends upon
## the command line argument order; so add the flags where they
## appear in depend2.am. Note that the slowdown incurred here
## affects only configure: in makefiles, %FASTDEP% shortcuts this.
for arg
do
case $arg in
-c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
*) set fnord "$@" "$arg" ;;
esac
shift # fnord
shift # $arg
done
"$@"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
mv "$tmpdepfile" "$depfile"
;;
gcc)
## Note that this doesn't just cater to obsosete pre-3.x GCC compilers.
## but also to in-use compilers like IMB xlc/xlC and the HP C compiler.
## (see the conditional assignment to $gccflag above).
## There are various ways to get dependency output from gcc. Here's
## why we pick this rather obscure method:
## - Don't want to use -MD because we'd like the dependencies to end
## up in a subdir. Having to rename by hand is ugly.
## (We might end up doing this anyway to support other compilers.)
## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
## -MM, not -M (despite what the docs say). Also, it might not be
## supported by the other compilers which use the 'gcc' depmode.
## - Using -M directly means running the compiler twice (even worse
## than renaming).
if test -z "$gccflag"; then
gccflag=-MD,
fi
"$@" -Wp,"$gccflag$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The second -e expression handles DOS-style file names with drive
# letters.
sed -e 's/^[^:]*: / /' \
-e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
## This next piece of magic avoids the "deleted header file" problem.
## The problem is that when a header file which appears in a .P file
## is deleted, the dependency causes make to die (because there is
## typically no way to rebuild the header). We avoid this by adding
## dummy dependencies for each header file. Too bad gcc doesn't do
## this for us directly.
## Some versions of gcc put a space before the ':'. On the theory
## that the space means something, we add a space to the output as
## well. hp depmode also adds that space, but also prefixes the VPATH
## to the object. Take care to not repeat it in the output.
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
sgi)
if test "$libtool" = yes; then
"$@" "-Wp,-MDupdate,$tmpdepfile"
else
"$@" -MDupdate "$tmpdepfile"
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
echo "$object : \\" > "$depfile"
# Clip off the initial element (the dependent). Don't try to be
# clever and replace this with sed code, as IRIX sed won't handle
# lines with more than a fixed number of characters (4096 in
# IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
# the IRIX cc adds comments like '#:fec' to the end of the
# dependency line.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
| tr "$nl" ' ' >> "$depfile"
echo >> "$depfile"
# The second pass generates a dummy entry for each header file.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
>> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile"
;;
xlc)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
aix)
# The C for AIX Compiler uses -M and outputs the dependencies
# in a .u file. In older versions, this file always lives in the
# current directory. Also, the AIX compiler puts '$object:' at the
# start of each line; $object doesn't have directory information.
# Version 6 uses the directory in both cases.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.u
tmpdepfile2=$base.u
tmpdepfile3=$dir.libs/$base.u
"$@" -Wc,-M
else
tmpdepfile1=$dir$base.u
tmpdepfile2=$dir$base.u
tmpdepfile3=$dir$base.u
"$@" -M
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
aix_post_process_depfile
;;
tcc)
# tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26
# FIXME: That version still under development at the moment of writing.
# Make that this statement remains true also for stable, released
# versions.
# It will wrap lines (doesn't matter whether long or short) with a
# trailing '\', as in:
#
# foo.o : \
# foo.c \
# foo.h \
#
# It will put a trailing '\' even on the last line, and will use leading
# spaces rather than leading tabs (at least since its commit 0394caf7
# "Emit spaces for -MD").
"$@" -MD -MF "$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
# We have to change lines of the first kind to '$object: \'.
sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
# And for each line of the second kind, we have to emit a 'dep.h:'
# dummy dependency, to avoid the deleted-header problem.
sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
rm -f "$tmpdepfile"
;;
## The order of this option in the case statement is important, since the
## shell code in configure will try each of these formats in the order
## listed in this file. A plain '-MD' option would be understood by many
## compilers, so we must ensure this comes after the gcc and icc options.
pgcc)
# Portland's C compiler understands '-MD'.
# Will always output deps to 'file.d' where file is the root name of the
# source file under compilation, even if file resides in a subdirectory.
# The object file name does not affect the name of the '.d' file.
# pgcc 10.2 will output
# foo.o: sub/foo.c sub/foo.h
# and will wrap long lines using '\' :
# foo.o: sub/foo.c ... \
# sub/foo.h ... \
# ...
set_dir_from "$object"
# Use the source, not the object, to determine the base name, since
# that's sadly what pgcc will do too.
set_base_from "$source"
tmpdepfile=$base.d
# For projects that build the same source file twice into different object
# files, the pgcc approach of using the *source* file root name can cause
# problems in parallel builds. Use a locking strategy to avoid stomping on
# the same $tmpdepfile.
lockdir=$base.d-lock
trap "
echo '$0: caught signal, cleaning up...' >&2
rmdir '$lockdir'
exit 1
" 1 2 13 15
numtries=100
i=$numtries
while test $i -gt 0; do
# mkdir is a portable test-and-set.
if mkdir "$lockdir" 2>/dev/null; then
# This process acquired the lock.
"$@" -MD
stat=$?
# Release the lock.
rmdir "$lockdir"
break
else
# If the lock is being held by a different process, wait
# until the winning process is done or we timeout.
while test -d "$lockdir" && test $i -gt 0; do
sleep 1
i=`expr $i - 1`
done
fi
i=`expr $i - 1`
done
trap - 1 2 13 15
if test $i -le 0; then
echo "$0: failed to acquire lock after $numtries attempts" >&2
echo "$0: check lockdir '$lockdir'" >&2
exit 1
fi
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each line is of the form `foo.o: dependent.h',
# or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
# Do two passes, one to just change these to
# `$object: dependent.h' and one to simply `dependent.h:'.
sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp2)
# The "hp" stanza above does not work with aCC (C++) and HP's ia64
# compilers, which have integrated preprocessors. The correct option
# to use with these is +Maked; it writes dependencies to a file named
# 'foo.d', which lands next to the object file, wherever that
# happens to be.
# Much of this is similar to the tru64 case; see comments there.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir.libs/$base.d
"$@" -Wc,+Maked
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
"$@" +Maked
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
# Add 'dependent.h:' lines.
sed -ne '2,${
s/^ *//
s/ \\*$//
s/$/:/
p
}' "$tmpdepfile" >> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile" "$tmpdepfile2"
;;
tru64)
# The Tru64 compiler uses -MD to generate dependencies as a side
# effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
# At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
# dependencies in 'foo.d' instead, so we check for that too.
# Subdirectories are respected.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
# Libtool generates 2 separate objects for the 2 libraries. These
# two compilations output dependencies in $dir.libs/$base.o.d and
# in $dir$base.o.d. We have to check for both files, because
# one of the two compilations can be disabled. We should prefer
# $dir$base.o.d over $dir.libs/$base.o.d because the latter is
# automatically cleaned when .libs/ is deleted, while ignoring
# the former would cause a distcleancheck panic.
tmpdepfile1=$dir$base.o.d # libtool 1.5
tmpdepfile2=$dir.libs/$base.o.d # Likewise.
tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504
"$@" -Wc,-MD
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
tmpdepfile3=$dir$base.d
"$@" -MD
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
# Same post-processing that is required for AIX mode.
aix_post_process_depfile
;;
msvc7)
if test "$libtool" = yes; then
showIncludes=-Wc,-showIncludes
else
showIncludes=-showIncludes
fi
"$@" $showIncludes > "$tmpdepfile"
stat=$?
grep -v '^Note: including file: ' "$tmpdepfile"
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The first sed program below extracts the file names and escapes
# backslashes for cygpath. The second sed program outputs the file
# name when reading, but also accumulates all include files in the
# hold buffer in order to output them again at the end. This only
# works with sed implementations that can handle large buffers.
sed < "$tmpdepfile" -n '
/^Note: including file: *\(.*\)/ {
s//\1/
s/\\/\\\\/g
p
}' | $cygpath_u | sort -u | sed -n '
s/ /\\ /g
s/\(.*\)/'"$tab"'\1 \\/p
s/.\(.*\) \\/\1:/
H
$ {
s/.*/'"$tab"'/
G
p
}' >> "$depfile"
echo >> "$depfile" # make sure the fragment doesn't end with a backslash
rm -f "$tmpdepfile"
;;
msvc7msys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
#nosideeffect)
# This comment above is used by automake to tell side-effect
# dependency tracking mechanisms from slower ones.
dashmstdout)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout, regardless of -o.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
test -z "$dashmflag" && dashmflag=-M
# Require at least two characters before searching for ':'
# in the target name. This is to cope with DOS-style filenames:
# a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
"$@" $dashmflag |
sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
rm -f "$depfile"
cat < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this sed invocation
# correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
dashXmstdout)
# This case only exists to satisfy depend.m4. It is never actually
# run, as this mode is specially recognized in the preamble.
exit 1
;;
makedepend)
"$@" || exit $?
# Remove any Libtool call
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# X makedepend
shift
cleared=no eat=no
for arg
do
case $cleared in
no)
set ""; shift
cleared=yes ;;
esac
if test $eat = yes; then
eat=no
continue
fi
case "$arg" in
-D*|-I*)
set fnord "$@" "$arg"; shift ;;
# Strip any option that makedepend may not understand. Remove
# the object too, otherwise makedepend will parse it as a source file.
-arch)
eat=yes ;;
-*|$object)
;;
*)
set fnord "$@" "$arg"; shift ;;
esac
done
obj_suffix=`echo "$object" | sed 's/^.*\././'`
touch "$tmpdepfile"
${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
rm -f "$depfile"
# makedepend may prepend the VPATH from the source file name to the object.
# No need to regex-escape $object, excess matching of '.' is harmless.
sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process the last invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed '1,2d' "$tmpdepfile" \
| tr ' ' "$nl" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile" "$tmpdepfile".bak
;;
cpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
"$@" -E \
| sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
-e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
| sed '$ s: \\$::' > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
cat < "$tmpdepfile" >> "$depfile"
sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvisualcpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
IFS=" "
for arg
do
case "$arg" in
-o)
shift
;;
$object)
shift
;;
"-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
set fnord "$@"
shift
shift
;;
*)
set fnord "$@" "$arg"
shift
shift
;;
esac
done
"$@" -E 2>/dev/null |
sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
echo "$tab" >> "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvcmsys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
none)
exec "$@"
;;
*)
echo "Unknown depmode $depmode" 1>&2
exit 1
;;
esac
exit 0
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/mpdt/Makefile.in | # Makefile.in generated by automake 1.15.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
@HAVE_BIN_TRUE@bin_PROGRAMS = mpdtcompose$(EXEEXT) mpdtexpand$(EXEEXT) \
@HAVE_BIN_TRUE@ mpdtinfo$(EXEEXT) mpdtreverse$(EXEEXT)
subdir = src/extensions/mpdt
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ac_python_devel.m4 \
$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
$(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
$(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h \
$(top_builddir)/src/include/fst/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(bindir)"
LTLIBRARIES = $(lib_LTLIBRARIES)
am__DEPENDENCIES_1 =
@HAVE_SCRIPT_TRUE@libfstmpdtscript_la_DEPENDENCIES = \
@HAVE_SCRIPT_TRUE@ ../../script/libfstscript.la \
@HAVE_SCRIPT_TRUE@ ../../lib/libfst.la $(am__DEPENDENCIES_1)
am__libfstmpdtscript_la_SOURCES_DIST = mpdtscript.cc
@HAVE_SCRIPT_TRUE@am_libfstmpdtscript_la_OBJECTS = mpdtscript.lo
libfstmpdtscript_la_OBJECTS = $(am_libfstmpdtscript_la_OBJECTS)
AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
libfstmpdtscript_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \
$(AM_CXXFLAGS) $(CXXFLAGS) $(libfstmpdtscript_la_LDFLAGS) \
$(LDFLAGS) -o $@
@HAVE_SCRIPT_TRUE@am_libfstmpdtscript_la_rpath = -rpath $(libdir)
PROGRAMS = $(bin_PROGRAMS)
am__mpdtcompose_SOURCES_DIST = mpdtcompose.cc
@HAVE_BIN_TRUE@am_mpdtcompose_OBJECTS = mpdtcompose.$(OBJEXT)
mpdtcompose_OBJECTS = $(am_mpdtcompose_OBJECTS)
mpdtcompose_LDADD = $(LDADD)
@HAVE_BIN_TRUE@mpdtcompose_DEPENDENCIES = libfstmpdtscript.la \
@HAVE_BIN_TRUE@ ../pdt/libfstpdtscript.la \
@HAVE_BIN_TRUE@ ../../script/libfstscript.la \
@HAVE_BIN_TRUE@ ../../lib/libfst.la $(am__DEPENDENCIES_1)
am__mpdtexpand_SOURCES_DIST = mpdtexpand.cc
@HAVE_BIN_TRUE@am_mpdtexpand_OBJECTS = mpdtexpand.$(OBJEXT)
mpdtexpand_OBJECTS = $(am_mpdtexpand_OBJECTS)
mpdtexpand_LDADD = $(LDADD)
@HAVE_BIN_TRUE@mpdtexpand_DEPENDENCIES = libfstmpdtscript.la \
@HAVE_BIN_TRUE@ ../pdt/libfstpdtscript.la \
@HAVE_BIN_TRUE@ ../../script/libfstscript.la \
@HAVE_BIN_TRUE@ ../../lib/libfst.la $(am__DEPENDENCIES_1)
am__mpdtinfo_SOURCES_DIST = mpdtinfo.cc
@HAVE_BIN_TRUE@am_mpdtinfo_OBJECTS = mpdtinfo.$(OBJEXT)
mpdtinfo_OBJECTS = $(am_mpdtinfo_OBJECTS)
mpdtinfo_LDADD = $(LDADD)
@HAVE_BIN_TRUE@mpdtinfo_DEPENDENCIES = libfstmpdtscript.la \
@HAVE_BIN_TRUE@ ../pdt/libfstpdtscript.la \
@HAVE_BIN_TRUE@ ../../script/libfstscript.la \
@HAVE_BIN_TRUE@ ../../lib/libfst.la $(am__DEPENDENCIES_1)
am__mpdtreverse_SOURCES_DIST = mpdtreverse.cc
@HAVE_BIN_TRUE@am_mpdtreverse_OBJECTS = mpdtreverse.$(OBJEXT)
mpdtreverse_OBJECTS = $(am_mpdtreverse_OBJECTS)
mpdtreverse_LDADD = $(LDADD)
@HAVE_BIN_TRUE@mpdtreverse_DEPENDENCIES = libfstmpdtscript.la \
@HAVE_BIN_TRUE@ ../pdt/libfstpdtscript.la \
@HAVE_BIN_TRUE@ ../../script/libfstscript.la \
@HAVE_BIN_TRUE@ ../../lib/libfst.la $(am__DEPENDENCIES_1)
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES =
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_@AM_V@)
am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@)
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_@AM_V@)
am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@)
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(libfstmpdtscript_la_SOURCES) $(mpdtcompose_SOURCES) \
$(mpdtexpand_SOURCES) $(mpdtinfo_SOURCES) \
$(mpdtreverse_SOURCES)
DIST_SOURCES = $(am__libfstmpdtscript_la_SOURCES_DIST) \
$(am__mpdtcompose_SOURCES_DIST) $(am__mpdtexpand_SOURCES_DIST) \
$(am__mpdtinfo_SOURCES_DIST) $(am__mpdtreverse_SOURCES_DIST)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CXX = @CXX@
CXXCPP = @CXXCPP@
CXXDEPMODE = @CXXDEPMODE@
CXXFLAGS = @CXXFLAGS@
CYGPATH_W = @CYGPATH_W@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DLLTOOL = @DLLTOOL@
DL_LIBS = @DL_LIBS@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GREP = @GREP@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PYTHON = @PYTHON@
PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@
PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@
PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@
PYTHON_LDFLAGS = @PYTHON_LDFLAGS@
PYTHON_PLATFORM = @PYTHON_PLATFORM@
PYTHON_PREFIX = @PYTHON_PREFIX@
PYTHON_SITE_PKG = @PYTHON_SITE_PKG@
PYTHON_VERSION = @PYTHON_VERSION@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
VERSION = @VERSION@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_CXX = @ac_ct_CXX@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
libfstdir = @libfstdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
pkgpyexecdir = @pkgpyexecdir@
pkgpythondir = @pkgpythondir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
pyexecdir = @pyexecdir@
pythondir = @pythondir@
runstatedir = @runstatedir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
AM_CPPFLAGS = -I$(srcdir)/../../include $(ICU_CPPFLAGS)
@HAVE_BIN_TRUE@LDADD = libfstmpdtscript.la \
@HAVE_BIN_TRUE@ ../pdt/libfstpdtscript.la \
@HAVE_BIN_TRUE@ ../../script/libfstscript.la \
@HAVE_BIN_TRUE@ ../../lib/libfst.la -lm $(DL_LIBS)
@HAVE_BIN_TRUE@mpdtcompose_SOURCES = mpdtcompose.cc
@HAVE_BIN_TRUE@mpdtexpand_SOURCES = mpdtexpand.cc
@HAVE_BIN_TRUE@mpdtinfo_SOURCES = mpdtinfo.cc
@HAVE_BIN_TRUE@mpdtreverse_SOURCES = mpdtreverse.cc
@HAVE_SCRIPT_TRUE@lib_LTLIBRARIES = libfstmpdtscript.la
@HAVE_SCRIPT_TRUE@libfstmpdtscript_la_SOURCES = mpdtscript.cc
@HAVE_SCRIPT_TRUE@libfstmpdtscript_la_LDFLAGS = -version-info 13:0:0
@HAVE_SCRIPT_TRUE@libfstmpdtscript_la_LIBADD = ../../script/libfstscript.la \
@HAVE_SCRIPT_TRUE@ ../../lib/libfst.la -lm $(DL_LIBS)
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .o .obj
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/extensions/mpdt/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign src/extensions/mpdt/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
install-libLTLIBRARIES: $(lib_LTLIBRARIES)
@$(NORMAL_INSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
list2=; for p in $$list; do \
if test -f $$p; then \
list2="$$list2 $$p"; \
else :; fi; \
done; \
test -z "$$list2" || { \
echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \
$(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \
}
uninstall-libLTLIBRARIES:
@$(NORMAL_UNINSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
for p in $$list; do \
$(am__strip_dir) \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \
done
clean-libLTLIBRARIES:
-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
@list='$(lib_LTLIBRARIES)'; \
locs=`for p in $$list; do echo $$p; done | \
sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
sort -u`; \
test -z "$$locs" || { \
echo rm -f $${locs}; \
rm -f $${locs}; \
}
libfstmpdtscript.la: $(libfstmpdtscript_la_OBJECTS) $(libfstmpdtscript_la_DEPENDENCIES) $(EXTRA_libfstmpdtscript_la_DEPENDENCIES)
$(AM_V_CXXLD)$(libfstmpdtscript_la_LINK) $(am_libfstmpdtscript_la_rpath) $(libfstmpdtscript_la_OBJECTS) $(libfstmpdtscript_la_LIBADD) $(LIBS)
install-binPROGRAMS: $(bin_PROGRAMS)
@$(NORMAL_INSTALL)
@list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
$(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \
fi; \
for p in $$list; do echo "$$p $$p"; done | \
sed 's/$(EXEEXT)$$//' | \
while read p p1; do if test -f $$p \
|| test -f $$p1 \
; then echo "$$p"; echo "$$p"; else :; fi; \
done | \
sed -e 'p;s,.*/,,;n;h' \
-e 's|.*|.|' \
-e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \
sed 'N;N;N;s,\n, ,g' | \
$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \
{ d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
if ($$2 == $$4) files[d] = files[d] " " $$1; \
else { print "f", $$3 "/" $$4, $$1; } } \
END { for (d in files) print "f", d, files[d] }' | \
while read type dir files; do \
if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
test -z "$$files" || { \
echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \
$(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \
} \
; done
uninstall-binPROGRAMS:
@$(NORMAL_UNINSTALL)
@list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
files=`for p in $$list; do echo "$$p"; done | \
sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \
-e 's/$$/$(EXEEXT)/' \
`; \
test -n "$$list" || exit 0; \
echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \
cd "$(DESTDIR)$(bindir)" && rm -f $$files
clean-binPROGRAMS:
@list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
mpdtcompose$(EXEEXT): $(mpdtcompose_OBJECTS) $(mpdtcompose_DEPENDENCIES) $(EXTRA_mpdtcompose_DEPENDENCIES)
@rm -f mpdtcompose$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(mpdtcompose_OBJECTS) $(mpdtcompose_LDADD) $(LIBS)
mpdtexpand$(EXEEXT): $(mpdtexpand_OBJECTS) $(mpdtexpand_DEPENDENCIES) $(EXTRA_mpdtexpand_DEPENDENCIES)
@rm -f mpdtexpand$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(mpdtexpand_OBJECTS) $(mpdtexpand_LDADD) $(LIBS)
mpdtinfo$(EXEEXT): $(mpdtinfo_OBJECTS) $(mpdtinfo_DEPENDENCIES) $(EXTRA_mpdtinfo_DEPENDENCIES)
@rm -f mpdtinfo$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(mpdtinfo_OBJECTS) $(mpdtinfo_LDADD) $(LIBS)
mpdtreverse$(EXEEXT): $(mpdtreverse_OBJECTS) $(mpdtreverse_DEPENDENCIES) $(EXTRA_mpdtreverse_DEPENDENCIES)
@rm -f mpdtreverse$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(mpdtreverse_OBJECTS) $(mpdtreverse_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpdtcompose.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpdtexpand.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpdtinfo.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpdtreverse.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpdtscript.Plo@am__quote@
.cc.o:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $<
.cc.obj:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
@am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Plo
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
# --- Generated automake driver targets; regenerate via automake, do not edit. ---
check-am: all-am
check: check-am
all-am: Makefile $(LTLIBRARIES) $(PROGRAMS)
install-binPROGRAMS: install-libLTLIBRARIES
# Create every installation directory before any file is copied into it.
installdirs:
	for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(bindir)"; do \
	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
	done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
# Install stripped binaries by re-invoking make with an INSTALL_PROGRAM that
# strips; STRIPPROG forwards a user-provided $(STRIP) tool to install-sh.
install-strip:
	if test -z '$(STRIP)'; then \
	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
	      install; \
	else \
	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
	      "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
	fi
# Cleaning hierarchy (least to most thorough):
# mostlyclean < clean < distclean < maintainer-clean.
mostlyclean-generic:
clean-generic:
distclean-generic:
	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
	@echo "This command is intended for maintainers to use"
	@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-binPROGRAMS clean-generic clean-libLTLIBRARIES \
	clean-libtool mostlyclean-am
distclean: distclean-am
	-rm -rf ./$(DEPDIR)
	-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
	distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-binPROGRAMS install-libLTLIBRARIES
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
	-rm -rf ./$(DEPDIR)
	-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
	mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-binPROGRAMS uninstall-libLTLIBRARIES
# .MAKE marks recipes that recursively invoke $(MAKE).
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \
	clean-binPROGRAMS clean-generic clean-libLTLIBRARIES \
	clean-libtool cscopelist-am ctags ctags-am distclean \
	distclean-compile distclean-generic distclean-libtool \
	distclean-tags distdir dvi dvi-am html html-am info info-am \
	install install-am install-binPROGRAMS install-data \
	install-data-am install-dvi install-dvi-am install-exec \
	install-exec-am install-html install-html-am install-info \
	install-info-am install-libLTLIBRARIES install-man install-pdf \
	install-pdf-am install-ps install-ps-am install-strip \
	installcheck installcheck-am installdirs maintainer-clean \
	maintainer-clean-generic mostlyclean mostlyclean-compile \
	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
	tags tags-am uninstall uninstall-am uninstall-binPROGRAMS \
	uninstall-libLTLIBRARIES
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7 | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/m4/ac_python_devel.m4 | dnl @synopsis AC_PYTHON_DEVEL([version])
dnl
dnl Note: Defines as a precious variable "PYTHON_VERSION". Don't
dnl override it in your configure.ac.
dnl
dnl This macro checks for Python and tries to get the include path to
dnl 'Python.h'. It provides the $(PYTHON_CPPFLAGS) and
dnl $(PYTHON_LDFLAGS) output variables. It also exports
dnl $(PYTHON_EXTRA_LIBS) and $(PYTHON_EXTRA_LDFLAGS) for embedding
dnl Python in your code.
dnl
dnl You can search for some particular version of Python by passing a
dnl parameter to this macro, for example ">= '2.3.1'", or "== '2.4'".
dnl Please note that you *have* to pass also an operator along with the
dnl version to match, and pay special attention to the single quotes
dnl surrounding the version number. Don't use "PYTHON_VERSION" for
dnl this: that environment variable is declared as precious and thus
dnl reserved for the end-user.
dnl
dnl This macro should work for all versions of Python >= 2.1.0. As an
dnl end user, you can disable the check for the python version by
dnl setting the PYTHON_NOVERSIONCHECK environment variable to something
dnl else than the empty string.
dnl
dnl If you need to use this macro for an older Python version, please
dnl contact the authors. We're always open for feedback.
dnl
dnl @category InstalledPackages
dnl @author Sebastian Huber <[email protected]>
dnl @author Alan W. Irwin <[email protected]>
dnl @author Rafael Laboissiere <[email protected]>
dnl @author Andrew Collier <[email protected]>
dnl @author Matteo Settenvini <[email protected]>
dnl @author Horst Knorr <[email protected]>
dnl @version 2006-05-27
dnl @license GPLWithACException
AC_DEFUN([AC_PYTHON_DEVEL],[
	#
	# Allow the use of a (user set) custom python version
	#
	AC_ARG_VAR([PYTHON_VERSION],[The installed Python
		version to use, for example '2.3'. This string
		will be appended to the Python interpreter
		canonical name.])
	AC_PATH_PROG([PYTHON],[python[$PYTHON_VERSION]])
	if test -z "$PYTHON"; then
	   AC_MSG_ERROR([Cannot find python$PYTHON_VERSION in your system path])
	   PYTHON_VERSION=""
	fi
	#
	# Check for a version of Python >= 2.1.0
	#
	# NOTE(review): the embedded snippets use Python 2 'print' statements and
	# the deprecated string module, so this macro only works with Python 2
	# interpreters -- confirm before reusing with Python 3.
	AC_MSG_CHECKING([for a version of Python >= '2.1.0'])
	ac_supports_python_ver=`$PYTHON -c "import sys, string; \
		ver = string.split(sys.version)[[0]]; \
		print ver >= '2.1.0'"`
	if test "$ac_supports_python_ver" != "True"; then
		if test -z "$PYTHON_NOVERSIONCHECK"; then
			AC_MSG_RESULT([no])
			AC_MSG_FAILURE([
This version of the AC@&t@_PYTHON_DEVEL macro
doesn't work properly with versions of Python before
2.1.0. You may need to re-run configure, setting the
variables PYTHON_CPPFLAGS, PYTHON_LDFLAGS, PYTHON_SITE_PKG,
PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand.
Moreover, to disable this check, set PYTHON_NOVERSIONCHECK
to something else than an empty string.
])
		else
			AC_MSG_RESULT([skip at user request])
		fi
	else
		AC_MSG_RESULT([yes])
	fi
	#
	# if the macro parameter ``version'' is set, honour it
	#
	if test -n "$1"; then
		AC_MSG_CHECKING([for a version of Python $1])
		ac_supports_python_ver=`$PYTHON -c "import sys, string; \
			ver = string.split(sys.version)[[0]]; \
			print ver $1"`
		if test "$ac_supports_python_ver" = "True"; then
	   	   AC_MSG_RESULT([yes])
		else
			AC_MSG_RESULT([no])
			AC_MSG_ERROR([this package requires Python $1.
If you have it installed, but it isn't the default Python
interpreter in your system path, please pass the PYTHON_VERSION
variable to configure. See ``configure --help'' for reference.
])
			PYTHON_VERSION=""
		fi
	fi
	#
	# Check if you have distutils, else fail
	#
	AC_MSG_CHECKING([for the distutils Python package])
	ac_distutils_result=`$PYTHON -c "import distutils" 2>&1`
	if test -z "$ac_distutils_result"; then
		AC_MSG_RESULT([yes])
	else
		AC_MSG_RESULT([no])
		AC_MSG_ERROR([cannot import Python module "distutils".
Please check your Python installation. The error was:
$ac_distutils_result])
		PYTHON_VERSION=""
	fi
	#
	# Check for Python include path
	#
	AC_MSG_CHECKING([for Python include path])
	if test -z "$PYTHON_CPPFLAGS"; then
		python_path=`$PYTHON -c "import distutils.sysconfig; \
			print distutils.sysconfig.get_python_inc();"`
		if test -n "${python_path}"; then
			python_path="-I$python_path"
		fi
		PYTHON_CPPFLAGS=$python_path
	fi
	AC_MSG_RESULT([$PYTHON_CPPFLAGS])
	AC_SUBST([PYTHON_CPPFLAGS])
	#
	# Check for Python library path
	#
	AC_MSG_CHECKING([for Python library path])
	if test -z "$PYTHON_LDFLAGS"; then
		# (makes two attempts to ensure we've got a version number
		# from the interpreter)
		py_version=`$PYTHON -c "from distutils.sysconfig import *; \
			from string import join; \
			print join(get_config_vars('VERSION'))"`
		# POSIX test(1) uses '=' for string equality; '==' is a bashism
		# that breaks with strictly POSIX /bin/sh implementations.
		if test "$py_version" = "[None]"; then
			if test -n "$PYTHON_VERSION"; then
				py_version=$PYTHON_VERSION
			else
				py_version=`$PYTHON -c "import sys; \
					print sys.version[[:3]]"`
			fi
		fi
		PYTHON_LDFLAGS=`$PYTHON -c "from distutils.sysconfig import *; \
			from string import join; \
			print '-L' + get_python_lib(0,1), \
			'-lpython';"`$py_version
	fi
	AC_MSG_RESULT([$PYTHON_LDFLAGS])
	AC_SUBST([PYTHON_LDFLAGS])
	#
	# Check for site packages
	#
	AC_MSG_CHECKING([for Python site-packages path])
	if test -z "$PYTHON_SITE_PKG"; then
		PYTHON_SITE_PKG=`$PYTHON -c "import distutils.sysconfig; \
			print distutils.sysconfig.get_python_lib(0,0);"`
	fi
	AC_MSG_RESULT([$PYTHON_SITE_PKG])
	AC_SUBST([PYTHON_SITE_PKG])
	#
	# libraries which must be linked in when embedding
	#
	AC_MSG_CHECKING(python extra libraries)
	if test -z "$PYTHON_EXTRA_LIBS"; then
	   PYTHON_EXTRA_LIBS=`$PYTHON -c "import distutils.sysconfig; \
		conf = distutils.sysconfig.get_config_var; \
		print conf('LOCALMODLIBS'), conf('LIBS')"`
	fi
	AC_MSG_RESULT([$PYTHON_EXTRA_LIBS])
	AC_SUBST(PYTHON_EXTRA_LIBS)
	#
	# linking flags needed when embedding
	#
	AC_MSG_CHECKING(python extra linking flags)
	if test -z "$PYTHON_EXTRA_LDFLAGS"; then
		PYTHON_EXTRA_LDFLAGS=`$PYTHON -c "import distutils.sysconfig; \
			conf = distutils.sysconfig.get_config_var; \
			print conf('LINKFORSHARED')"`
	fi
	AC_MSG_RESULT([$PYTHON_EXTRA_LDFLAGS])
	AC_SUBST(PYTHON_EXTRA_LDFLAGS)
	#
	# final check to see if everything compiles alright
	#
	AC_MSG_CHECKING([consistency of all components of python development environment])
	AC_LANG_PUSH([C])
	# save current global flags
	LIBS="$ac_save_LIBS $PYTHON_LDFLAGS"
	CPPFLAGS="$ac_save_CPPFLAGS $PYTHON_CPPFLAGS"
	AC_TRY_LINK([
		#include <Python.h>
	],[
		Py_Initialize();
	],[pythonexists=yes],[pythonexists=no])
	AC_MSG_RESULT([$pythonexists])
	if test ! "$pythonexists" = "yes"; then
	   AC_MSG_ERROR([
  Could not link test program to Python. Maybe the main Python library has been
  installed in some non-standard library path. If so, pass it to configure,
  via the LDFLAGS environment variable.
  Example: ./configure LDFLAGS="-L/usr/non-standard-path/python/lib"
  ============================================================================
   ERROR!
   You probably have to install the development version of the Python package
   for your distribution.  The exact name of this package varies among them.
  ============================================================================
	   ])
	  PYTHON_VERSION=""
	fi
	AC_LANG_POP
	# turn back to default flags
	CPPFLAGS="$ac_save_CPPFLAGS"
	LIBS="$ac_save_LIBS"
	#
	# all done!
	#
])
| 0 |
coqui_public_repos/STT/native_client/kenlm | coqui_public_repos/STT/native_client/kenlm/lm/binary_format.hh | #ifndef LM_BINARY_FORMAT_H
#define LM_BINARY_FORMAT_H
#include "config.hh"
#include "model_type.hh"
#include "read_arpa.hh"
#include "../util/file_piece.hh"
#include "../util/mmap.hh"
#include "../util/scoped.hh"
#include <cstddef>
#include <vector>
#include <stdint.h>
namespace lm {
namespace ngram {
extern const char *kModelNames[6];
/*Inspect a file to determine if it is a binary lm. If not, return false.
* If so, return true and set recognized to the type. This is the only API in
* this header designed for use by decoder authors.
*/
KENLM_EXPORT bool RecognizeBinary(const char *file, ModelType &recognized);
KENLM_EXPORT bool RecognizeBinary(const char *file_data, const uint64_t file_data_size, ModelType &recognized);
// Header metadata written verbatim (fixed width) at the front of a binary
// model file; reading code memcpy's this struct, so its layout is the format.
struct FixedWidthParameters {
  // Maximum n-gram order of the model.
  unsigned char order;
  // Presumably the space multiplier for the probing hash table -- confirm
  // against the probing search implementation.
  float probing_multiplier;
  // What type of model is this?
  ModelType model_type;
  // Does the end of the file have the actual strings in the vocabulary?
  bool has_vocabulary;
  unsigned int search_version;
};

// This is a macro instead of an inline function so constants can be assigned using it.
#define ALIGN8(a) ((std::ptrdiff_t(((a)-1)/8)+1)*8)

// Parameters stored in the header of a binary file.
struct Parameters {
  // Fixed-width portion, copied byte-for-byte.
  FixedWidthParameters fixed;
  // Per-order n-gram counts; length equals fixed.order.
  std::vector<uint64_t> counts;
};
// Encapsulates reading and writing the binary model format: the fixed-width
// header, vocabulary region, and search region. Models may be backed by a
// file (mmap via file_/mapping_) or by caller-provided memory (file_data_).
class BinaryFormat {
  public:
    explicit BinaryFormat(const Config &config);

    ~BinaryFormat(){
      // file_data_ is a non-owning pointer into caller-provided memory; it is
      // only cleared here. Owned resources (file_, mapping_, memory_*) are
      // released by their own RAII destructors.
      file_data_ = NULL;
    }

    // Reading a binary file:
    // Takes ownership of fd
    void InitializeBinary(int fd, ModelType model_type, unsigned int search_version, Parameters ¶ms);
    // In-memory variant: file_data must outlive this object (not copied).
    void InitializeBinary(const char *file_data, ModelType model_type, unsigned int search_version, Parameters ¶ms);
    // Used to read parts of the file to update the config object before figuring out full size.
    void ReadForConfig(void *to, std::size_t amount, uint64_t offset_excluding_header) const;
    void ReadForConfig(void *to, std::size_t amount, uint64_t offset_excluding_header, bool useMemory) const;
    // Actually load the binary file and return a pointer to the beginning of the search area.
    void *LoadBinary(std::size_t size);
    void *LoadBinary(std::size_t size, const uint64_t file_size);

    uint64_t VocabStringReadingOffset() const {
      assert(vocab_string_offset_ != kInvalidOffset);
      return vocab_string_offset_;
    }

    // Writing a binary file or initializing in RAM from ARPA:
    // Size for vocabulary.
    void *SetupJustVocab(std::size_t memory_size, uint8_t order);
    // Warning: can change the vocaulary base pointer.
    void *GrowForSearch(std::size_t memory_size, std::size_t vocab_pad, void *&vocab_base);
    // Warning: can change vocabulary and search base addresses.
    void WriteVocabWords(const std::string &buffer, void *&vocab_base, void *&search_base);
    // Write the header at the beginning of the file.
    void FinishFile(const Config &config, ModelType model_type, unsigned int search_version, const std::vector<uint64_t> &counts);

  private:
    void MapFile(void *&vocab_base, void *&search_base);

    // Copied from configuration.
    const Config::WriteMethod write_method_;
    const char *write_mmap_;
    util::LoadMethod load_method_;

    // File behind memory, if any.
    util::scoped_fd file_;
    const char *file_data_;

    // If there is a file involved, a single mapping.
    // FIX: was "util::scoped_memory mapping_= new util::scoped_memory(true);"
    // which initialized a value member from a raw heap pointer (ill-formed and
    // leaky). A default-constructed scoped_memory is the correct RAII holder;
    // MapFile/LoadBinary populate it.
    util::scoped_memory mapping_;

    // If the data is only in memory, separately allocate each because the trie
    // knows vocab's size before it knows search's size (because SRILM might
    // have pruned).
    util::scoped_memory memory_vocab_, memory_search_;

    // Memory ranges. Note that these may not be contiguous and may not all
    // exist.
    std::size_t header_size_, vocab_size_, vocab_pad_;
    // aka end of search.
    uint64_t vocab_string_offset_;

    static const uint64_t kInvalidOffset = (uint64_t)-1;
};
bool IsBinaryFormat(int fd);
bool IsBinaryFormat(const char *file_data, uint64_t size);
} // namespace ngram
} // namespace lm
#endif // LM_BINARY_FORMAT_H
| 0 |
coqui_public_repos/TTS/TTS/vocoder | coqui_public_repos/TTS/TTS/vocoder/models/parallel_wavegan_discriminator.py | import math
import torch
from torch import nn
from torch.nn.utils.parametrize import remove_parametrizations
from TTS.vocoder.layers.parallel_wavegan import ResidualBlock
class ParallelWaveganDiscriminator(nn.Module):
"""PWGAN discriminator as in https://arxiv.org/abs/1910.11480.
It classifies each audio window real/fake and returns a sequence
of predictions.
It is a stack of convolutional blocks with dilation.
"""
# pylint: disable=dangerous-default-value
def __init__(
self,
in_channels=1,
out_channels=1,
kernel_size=3,
num_layers=10,
conv_channels=64,
dilation_factor=1,
nonlinear_activation="LeakyReLU",
nonlinear_activation_params={"negative_slope": 0.2},
bias=True,
):
super().__init__()
assert (kernel_size - 1) % 2 == 0, " [!] does not support even number kernel size."
assert dilation_factor > 0, " [!] dilation factor must be > 0."
self.conv_layers = nn.ModuleList()
conv_in_channels = in_channels
for i in range(num_layers - 1):
if i == 0:
dilation = 1
else:
dilation = i if dilation_factor == 1 else dilation_factor**i
conv_in_channels = conv_channels
padding = (kernel_size - 1) // 2 * dilation
conv_layer = [
nn.Conv1d(
conv_in_channels,
conv_channels,
kernel_size=kernel_size,
padding=padding,
dilation=dilation,
bias=bias,
),
getattr(nn, nonlinear_activation)(inplace=True, **nonlinear_activation_params),
]
self.conv_layers += conv_layer
padding = (kernel_size - 1) // 2
last_conv_layer = nn.Conv1d(conv_in_channels, out_channels, kernel_size=kernel_size, padding=padding, bias=bias)
self.conv_layers += [last_conv_layer]
self.apply_weight_norm()
def forward(self, x):
"""
x : (B, 1, T).
Returns:
Tensor: (B, 1, T)
"""
for f in self.conv_layers:
x = f(x)
return x
def apply_weight_norm(self):
def _apply_weight_norm(m):
if isinstance(m, (torch.nn.Conv1d, torch.nn.Conv2d)):
torch.nn.utils.parametrizations.weight_norm(m)
self.apply(_apply_weight_norm)
def remove_weight_norm(self):
def _remove_weight_norm(m):
try:
# print(f"Weight norm is removed from {m}.")
remove_parametrizations(m, "weight")
except ValueError: # this module didn't have weight norm
return
self.apply(_remove_weight_norm)
class ResidualParallelWaveganDiscriminator(nn.Module):
    """WaveNet-style PWGAN discriminator (https://arxiv.org/abs/1910.11480).

    A non-causal stack of residual blocks with skip connections; maps a raw
    waveform (B, in_channels, T) to per-sample real/fake scores
    (B, out_channels, T).
    """

    # pylint: disable=dangerous-default-value
    def __init__(
        self,
        in_channels=1,
        out_channels=1,
        kernel_size=3,
        num_layers=30,
        stacks=3,
        res_channels=64,
        gate_channels=128,
        skip_channels=64,
        dropout=0.0,
        bias=True,
        nonlinear_activation="LeakyReLU",
        nonlinear_activation_params={"negative_slope": 0.2},
    ):
        super().__init__()
        assert (kernel_size - 1) % 2 == 0, "Not support even number kernel size."

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_layers = num_layers
        self.stacks = stacks
        self.kernel_size = kernel_size
        # Normalizes the accumulated skip sum by the number of layers.
        self.res_factor = math.sqrt(1.0 / num_layers)

        # check the number of num_layers and stacks
        assert num_layers % stacks == 0
        layers_per_stack = num_layers // stacks

        # define first convolution
        self.first_conv = nn.Sequential(
            nn.Conv1d(in_channels, res_channels, kernel_size=1, padding=0, dilation=1, bias=True),
            getattr(nn, nonlinear_activation)(inplace=True, **nonlinear_activation_params),
        )

        # define residual blocks; dilation resets at each stack boundary.
        self.conv_layers = nn.ModuleList()
        for layer in range(num_layers):
            dilation = 2 ** (layer % layers_per_stack)
            conv = ResidualBlock(
                kernel_size=kernel_size,
                res_channels=res_channels,
                gate_channels=gate_channels,
                skip_channels=skip_channels,
                aux_channels=-1,
                dilation=dilation,
                dropout=dropout,
                bias=bias,
                use_causal_conv=False,
            )
            self.conv_layers += [conv]

        # define output layers
        self.last_conv_layers = nn.ModuleList(
            [
                getattr(nn, nonlinear_activation)(inplace=True, **nonlinear_activation_params),
                nn.Conv1d(skip_channels, skip_channels, kernel_size=1, padding=0, dilation=1, bias=True),
                getattr(nn, nonlinear_activation)(inplace=True, **nonlinear_activation_params),
                nn.Conv1d(skip_channels, out_channels, kernel_size=1, padding=0, dilation=1, bias=True),
            ]
        )

        # apply weight norm
        self.apply_weight_norm()

    def forward(self, x):
        """Score a waveform.

        x: (B, 1, T).

        Returns:
            Tensor: (B, out_channels, T)
        """
        x = self.first_conv(x)

        # Sum skip outputs from every residual block, then rescale.
        skips = 0
        for f in self.conv_layers:
            x, h = f(x, None)
            skips += h
        skips *= self.res_factor

        # apply final layers
        x = skips
        for f in self.last_conv_layers:
            x = f(x)
        return x

    def apply_weight_norm(self):
        """Attach weight-norm parametrization to every Conv1d/Conv2d module."""

        def _apply_weight_norm(m):
            if isinstance(m, (torch.nn.Conv1d, torch.nn.Conv2d)):
                torch.nn.utils.parametrizations.weight_norm(m)

        self.apply(_apply_weight_norm)

    def remove_weight_norm(self):
        """Strip weight-norm parametrizations (e.g. before export/inference)."""

        def _remove_weight_norm(m):
            try:
                # Kept silent for consistency with ParallelWaveganDiscriminator;
                # an unconditional print per module is too noisy in production.
                # print(f"Weight norm is removed from {m}.")
                remove_parametrizations(m, "weight")
            except ValueError:  # this module didn't have weight norm
                return

        self.apply(_remove_weight_norm)
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/android-cache-armeabi-v7a-android-24.yml | build:
  # CI job: builds and publishes the Android SDK/emulator cache artifact for
  # armeabi-v7a / android-24; the referenced scripts do the actual work.
  template_file: generic_tc_caching-linux-opt-base.tyml
  system_setup:
    >
    ${java.packages_xenial.apt}
  cache:
    artifact_url: ${system.android_cache.armeabi_v7a.android_24.url}
    artifact_namespace: ${system.android_cache.armeabi_v7a.android_24.namespace}
  scripts:
    setup: "taskcluster/tc-true.sh"
    build: "taskcluster/android_cache-build.sh armeabi-v7a android-24 default"
    package: "taskcluster/android_cache-package.sh"
  workerType: "${docker.smallTask}"
  metadata:
    name: "Builds Android cache armeabi-v7a / android-24"
    description: "Setup an Android SDK / emulator cache for Android armeabi-v7a / android-24"
| 0 |
coqui_public_repos/inference-engine/third_party | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/compile | #! /bin/sh
# Wrapper for compilers which do not understand '-c -o'.
scriptversion=2012-10-14.11; # UTC
# Copyright (C) 1999-2013 Free Software Foundation, Inc.
# Written by Tom Tromey <[email protected]>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# This file is maintained in Automake, please report
# bugs to <[email protected]> or send patches to
# <[email protected]>.
nl='
'
# We need space, tab and new line, in precisely that order. Quoting is
# there to prevent tools from complaining about whitespace usage.
IFS=" "" $nl"
file_conv=
# func_file_conv build_file lazy
# Convert a $build file to $host form and store it in $file
# Currently only supports Windows hosts. If the determined conversion
# type is listed in (the comma separated) LAZY, no conversion will
# take place.
func_file_conv ()
{
  file=$1
  case $file in
    / | /[!/]*) # absolute file, and not a UNC file
      if test -z "$file_conv"; then
	# lazily determine how to convert abs files
	case `uname -s` in
	  MINGW*)
	    file_conv=mingw
	    ;;
	  CYGWIN*)
	    file_conv=cygwin
	    ;;
	  *)
	    file_conv=wine
	    ;;
	esac
      fi
      # Skip conversion when the detected type appears in the comma-separated
      # LAZY list ($2); otherwise translate with the platform's path tool.
      case $file_conv/,$2, in
	*,$file_conv,*)
	  ;;
	mingw/*)
	  file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
	  ;;
	cygwin/*)
	  file=`cygpath -m "$file" || echo "$file"`
	  ;;
	wine/*)
	  file=`winepath -w "$file" || echo "$file"`
	  ;;
      esac
      ;;
  esac
}
# func_cl_dashL linkdir
# Make cl look for libraries in LINKDIR
func_cl_dashL ()
{
  func_file_conv "$1"
  # Accumulate a ;-separated search path (consumed by func_cl_dashl) and the
  # equivalent cl.exe linker flag.
  if test -z "$lib_path"; then
    lib_path=$file
  else
    lib_path="$lib_path;$file"
  fi
  linker_opts="$linker_opts -LIBPATH:$file"
}
# func_cl_dashl library
# Do a library search-path lookup for cl
func_cl_dashl ()
{
  lib=$1
  found=no
  save_IFS=$IFS
  IFS=';'
  # Search each -L directory plus %LIB%; prefer the DLL import library when
  # linking shared, then the static .lib, then a MinGW-style lib*.a.
  for dir in $lib_path $LIB
  do
    IFS=$save_IFS
    if $shared && test -f "$dir/$lib.dll.lib"; then
      found=yes
      lib=$dir/$lib.dll.lib
      break
    fi
    if test -f "$dir/$lib.lib"; then
      found=yes
      lib=$dir/$lib.lib
      break
    fi
    if test -f "$dir/lib$lib.a"; then
      found=yes
      lib=$dir/lib$lib.a
      break
    fi
  done
  IFS=$save_IFS

  # Fall back to "NAME.lib" and let the linker report a lookup failure.
  if test "$found" != yes; then
    lib=$lib.lib
  fi
}
# func_cl_wrapper cl arg...
# Adjust compile command to suit cl
func_cl_wrapper ()
{
  # Assume a capable shell
  lib_path=
  shared=:
  linker_opts=
  # Rewrites the whole argument list in place: each handled argument is
  # appended in cl.exe form via `set x "$@" ...; shift`, which rotates the
  # positional parameters. "$eat" marks that the NEXT argument was already
  # consumed as the value of the current option.
  for arg
  do
    if test -n "$eat"; then
      eat=
    else
      case $1 in
	-o)
	  # configure might choose to run compile as 'compile cc -o foo foo.c'.
	  eat=1
	  case $2 in
	    *.o | *.[oO][bB][jJ])
	      func_file_conv "$2"
	      set x "$@" -Fo"$file"
	      shift
	      ;;
	    *)
	      func_file_conv "$2"
	      set x "$@" -Fe"$file"
	      shift
	      ;;
	  esac
	  ;;
	-I)
	  eat=1
	  func_file_conv "$2" mingw
	  set x "$@" -I"$file"
	  shift
	  ;;
	-I*)
	  func_file_conv "${1#-I}" mingw
	  set x "$@" -I"$file"
	  shift
	  ;;
	-l)
	  eat=1
	  func_cl_dashl "$2"
	  set x "$@" "$lib"
	  shift
	  ;;
	-l*)
	  func_cl_dashl "${1#-l}"
	  set x "$@" "$lib"
	  shift
	  ;;
	-L)
	  eat=1
	  func_cl_dashL "$2"
	  ;;
	-L*)
	  func_cl_dashL "${1#-L}"
	  ;;
	-static)
	  shared=false
	  ;;
	-Wl,*)
	  # Forward comma-separated linker flags individually.
	  arg=${1#-Wl,}
	  save_ifs="$IFS"; IFS=','
	  for flag in $arg; do
	    IFS="$save_ifs"
	    linker_opts="$linker_opts $flag"
	  done
	  IFS="$save_ifs"
	  ;;
	-Xlinker)
	  eat=1
	  linker_opts="$linker_opts $2"
	  ;;
	-*)
	  set x "$@" "$1"
	  shift
	  ;;
	*.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
	  func_file_conv "$1"
	  set x "$@" -Tp"$file"
	  shift
	  ;;
	*.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
	  func_file_conv "$1" mingw
	  set x "$@" "$file"
	  shift
	  ;;
	*)
	  set x "$@" "$1"
	  shift
	  ;;
      esac
    fi
    shift
  done
  if test -n "$linker_opts"; then
    linker_opts="-link$linker_opts"
  fi
  # Replace this process with cl; the trailing exit is unreachable unless
  # exec itself fails.
  exec "$@" $linker_opts
  exit 1
}
eat=

# Dispatch: help/version, delegate entirely to the cl wrapper for MSVC, or
# fall through to the generic '-c -o' emulation below.
case $1 in
  '')
     echo "$0: No command. Try '$0 --help' for more information." 1>&2
     exit 1;
     ;;
  -h | --h*)
    cat <<\EOF
Usage: compile [--help] [--version] PROGRAM [ARGS]

Wrapper for compilers which do not understand '-c -o'.
Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
arguments, and rename the output as expected.

If you are trying to build a whole package this is not the
right script to run: please start by reading the file 'INSTALL'.

Report bugs to <[email protected]>.
EOF
    exit $?
    ;;
  -v | --v*)
    echo "compile $scriptversion"
    exit $?
    ;;
  cl | *[/\\]cl | cl.exe | *[/\\]cl.exe )
    func_cl_wrapper "$@"      # Doesn't return...
    ;;
esac

ofile=
cfile=

# Strip '-o OBJ' (remembering OBJ) and remember the .c source, rebuilding the
# remaining argument list via the `set x "$@" ...; shift` rotation idiom.
for arg
do
  if test -n "$eat"; then
    eat=
  else
    case $1 in
      -o)
	# configure might choose to run compile as 'compile cc -o foo foo.c'.
	# So we strip '-o arg' only if arg is an object.
	eat=1
	case $2 in
	  *.o | *.obj)
	    ofile=$2
	    ;;
	  *)
	    set x "$@" -o "$2"
	    shift
	    ;;
	esac
	;;
      *.c)
	cfile=$1
	set x "$@" "$1"
	shift
	;;
      *)
	set x "$@" "$1"
	shift
	;;
    esac
  fi
  shift
done

if test -z "$ofile" || test -z "$cfile"; then
  # If no '-o' option was seen then we might have been invoked from a
  # pattern rule where we don't need one. That is ok -- this is a
  # normal compilation that the losing compiler can handle. If no
  # '.c' file was seen then we are probably linking. That is also
  # ok.
  exec "$@"
fi

# Name of file we expect compiler to create.
cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`

# Create the lock directory.
# Note: use '[/\\:.-]' here to ensure that we don't use the same name
# that we are using for the .o file. Also, base the name on the expected
# object file name, since that is what matters with a parallel build.
lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
while true; do
  if mkdir "$lockdir" >/dev/null 2>&1; then
    break
  fi
  sleep 1
done

# FIXME: race condition here if user kills between mkdir and trap.
trap "rmdir '$lockdir'; exit 1" 1 2 15

# Run the compile.
"$@"
ret=$?

# The compiler drops the object next to the source; move it where -o asked.
# Some compilers emit FOO.obj instead of FOO.o, hence the ${cofile}bj case.
if test -f "$cofile"; then
  test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
elif test -f "${cofile}bj"; then
  test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
fi

rmdir "$lockdir"
exit $ret
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'write-file-hooks 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC"
# time-stamp-end: "; # UTC"
# End:
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/randequivalent.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Tests if two FSTS are equivalent by checking if random strings from one FST
// are transduced the same by both FSTs.
#ifndef FST_RANDEQUIVALENT_H_
#define FST_RANDEQUIVALENT_H_
#include <fst/log.h>
#include <fst/arcsort.h>
#include <fst/compose.h>
#include <fst/project.h>
#include <fst/randgen.h>
#include <fst/shortest-distance.h>
#include <fst/vector-fst.h>
namespace fst {
// Test if two FSTs are stochastically equivalent by randomly generating
// random paths through the FSTs.
//
// For each randomly generated path, the algorithm computes for each
// of the two FSTs the sum of the weights of all the successful paths
// sharing the same input and output labels as the considered randomly
// generated path and checks that these two values are within a user-specified
// delta. Returns optional error value (when FLAGS_error_fatal = false).
template <class Arc, class ArcSelector>
bool RandEquivalent(const Fst<Arc> &fst1, const Fst<Arc> &fst2,
int32 num_paths, float delta,
const RandGenOptions<ArcSelector> &opts,
bool *error = nullptr) {
using Weight = typename Arc::Weight;
if (error) *error = false;
// Checks that the symbol table are compatible.
if (!CompatSymbols(fst1.InputSymbols(), fst2.InputSymbols()) ||
!CompatSymbols(fst1.OutputSymbols(), fst2.OutputSymbols())) {
FSTERROR() << "RandEquivalent: Input/output symbol tables of 1st "
<< "argument do not match input/output symbol tables of 2nd "
<< "argument";
if (error) *error = true;
return false;
}
static const ILabelCompare<Arc> icomp;
static const OLabelCompare<Arc> ocomp;
VectorFst<Arc> sfst1(fst1);
VectorFst<Arc> sfst2(fst2);
Connect(&sfst1);
Connect(&sfst2);
ArcSort(&sfst1, icomp);
ArcSort(&sfst2, icomp);
bool result = true;
for (int32 n = 0; n < num_paths; ++n) {
VectorFst<Arc> path;
const auto &fst = rand() % 2 ? sfst1 : sfst2; // NOLINT
RandGen(fst, &path, opts);
VectorFst<Arc> ipath(path);
VectorFst<Arc> opath(path);
Project(&ipath, PROJECT_INPUT);
Project(&opath, PROJECT_OUTPUT);
VectorFst<Arc> cfst1, pfst1;
Compose(ipath, sfst1, &cfst1);
ArcSort(&cfst1, ocomp);
Compose(cfst1, opath, &pfst1);
// Gives up if there are epsilon cycles in a non-idempotent semiring.
if (!(Weight::Properties() & kIdempotent) &&
pfst1.Properties(kCyclic, true)) {
continue;
}
const auto sum1 = ShortestDistance(pfst1);
VectorFst<Arc> cfst2;
Compose(ipath, sfst2, &cfst2);
ArcSort(&cfst2, ocomp);
VectorFst<Arc> pfst2;
Compose(cfst2, opath, &pfst2);
// Gives up if there are epsilon cycles in a non-idempotent semiring.
if (!(Weight::Properties() & kIdempotent) &&
pfst2.Properties(kCyclic, true)) {
continue;
}
const auto sum2 = ShortestDistance(pfst2);
if (!ApproxEqual(sum1, sum2, delta)) {
VLOG(1) << "Sum1 = " << sum1;
VLOG(1) << "Sum2 = " << sum2;
result = false;
break;
}
}
if (fst1.Properties(kError, false) || fst2.Properties(kError, false)) {
if (error) *error = true;
return false;
}
return result;
}
// Tests if two FSTs are equivalent by randomly generating a nnum_paths paths
// (no longer than the path_length) using a user-specified seed, optionally
// indicating an error setting an optional error argument to true.
template <class Arc>
bool RandEquivalent(const Fst<Arc> &fst1, const Fst<Arc> &fst2, int32 num_paths,
float delta = kDelta, time_t seed = time(nullptr),
int32 max_length = std::numeric_limits<int32>::max(),
bool *error = nullptr) {
const UniformArcSelector<Arc> uniform_selector(seed);
const RandGenOptions<UniformArcSelector<Arc>> opts(uniform_selector,
max_length);
return RandEquivalent(fst1, fst2, num_paths, delta, opts, error);
}
} // namespace fst
#endif // FST_RANDEQUIVALENT_H_
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/bin/import_timit.py | #!/usr/bin/env python
"""
NAME : LDC TIMIT Dataset
URL : https://catalog.ldc.upenn.edu/ldc93s1
HOURS : 5
TYPE : Read - English
AUTHORS : Garofolo, John, et al.
TYPE : LDC Membership
LICENCE : LDC User Agreement
"""
import errno
import fnmatch
import os
import subprocess
import sys
import tarfile
from os import path
import pandas as pd
def clean(word):
    """Normalize a transcript token for ASR training.

    Lowercases the token and strips the punctuation characters
    . , ; " ! ? : -  (same set as the original chained ``replace`` calls);
    all other characters, including apostrophes, are preserved.

    Args:
        word: token string from a TIMIT .TXT transcript.

    Returns:
        The cleaned, lowercased token.
    """
    # str.translate deletes every mapped character in a single pass instead of
    # ten chained str.replace calls.
    return word.lower().translate(str.maketrans("", "", '.,;"!?:-'))
def _preprocess_data(args):
    """Extract (if needed), convert and index the LDC TIMIT corpus.

    Args:
        args: Path to a directory containing either an extracted ``TIMIT``
            subdirectory or the ``TIMIT-LDC93S1.tgz`` archive downloaded
            from the LDC.

    Raises:
        IOError: If neither the extracted data nor the archive is found,
            or if a converted wav is in neither a train nor a test split.

    Side effects:
        Converts every NIST-sphere ``*.WAV`` to a RIFF ``*_rif.wav`` via
        sox, then writes ``timit_all.csv``, ``timit_train.csv`` and
        ``timit_test.csv`` into the ``TIMIT`` directory.
    """
    # Assume data is downloaded from LDC - https://catalog.ldc.upenn.edu/ldc93s1

    # SA sentences are repeated throughout by each speaker therefore can be
    # removed for ASR as they will affect WER.
    ignoreSASentences = True

    if ignoreSASentences:
        print("Using recommended ignore SA sentences")
        print(
            "Ignoring SA sentences (2 x sentences which are repeated by all speakers)"
        )
    else:
        print("Using unrecommended setting to include SA sentences")

    datapath = args
    target = path.join(datapath, "TIMIT")

    # print() does not interpolate "%s"; pass the path as a separate argument.
    print(
        "Checking to see if data has already been extracted in given argument:",
        target,
    )

    if not path.isdir(target):
        print(
            "Could not find extracted data, trying to find: TIMIT-LDC93S1.tgz in: ",
            datapath,
        )
        filepath = path.join(datapath, "TIMIT-LDC93S1.tgz")
        if path.isfile(filepath):
            print("File found, extracting")
            tar = tarfile.open(filepath)
            tar.extractall(target)
            tar.close()
        else:
            print("File should be downloaded from LDC and placed at:", filepath)
            strerror = "File not found"
            # Bug fix: pass a real errno code; `errno` alone is the module.
            raise IOError(errno.ENOENT, strerror, filepath)

    else:
        # is path therefore continue
        print("Found extracted data in: ", target)

    print("Preprocessing data")

    # We convert the .WAV (NIST sphere format) into MSOFT .wav;
    # creates _rif.wav as the new .wav file.
    for root, dirnames, filenames in os.walk(target):
        for filename in fnmatch.filter(filenames, "*.WAV"):
            sph_file = os.path.join(root, filename)
            wav_file = os.path.join(root, filename)[:-4] + "_rif.wav"
            print("converting {} to {}".format(sph_file, wav_file))
            subprocess.check_call(["sox", sph_file, wav_file])

    print("Preprocessing Complete")
    print("Building CSVs")

    # Lists to build CSV files
    train_list_wavs, train_list_trans, train_list_size = [], [], []
    test_list_wavs, test_list_trans, test_list_size = [], [], []

    for root, dirnames, filenames in os.walk(target):
        for filename in fnmatch.filter(filenames, "*_rif.wav"):
            full_wav = os.path.join(root, filename)
            wav_filesize = path.getsize(full_wav)

            # need to remove _rif.wav (8 chars) then add .TXT
            trans_file = full_wav[:-8] + ".TXT"
            with open(trans_file, "r") as f:
                for line in f:
                    split = line.split()
                    # First two fields are sample start/end; rest is the text.
                    start = split[0]
                    end = split[1]
                    t_list = split[2:]
                    trans = ""

                    for t in t_list:
                        trans = trans + " " + clean(t)

            # if ignoreSASentences we only want those without SA in the name
            # OR
            # if not ignoreSASentences we want all to be added
            if (ignoreSASentences and not ("SA" in os.path.basename(full_wav))) or (
                not ignoreSASentences
            ):
                if "train" in full_wav.lower():
                    train_list_wavs.append(full_wav)
                    train_list_trans.append(trans)
                    train_list_size.append(wav_filesize)
                elif "test" in full_wav.lower():
                    test_list_wavs.append(full_wav)
                    test_list_trans.append(trans)
                    test_list_size.append(wav_filesize)
                else:
                    raise IOError

    a = {
        "wav_filename": train_list_wavs,
        "wav_filesize": train_list_size,
        "transcript": train_list_trans,
    }

    c = {
        "wav_filename": test_list_wavs,
        "wav_filesize": test_list_size,
        "transcript": test_list_trans,
    }

    all = {
        "wav_filename": train_list_wavs + test_list_wavs,
        "wav_filesize": train_list_size + test_list_size,
        "transcript": train_list_trans + test_list_trans,
    }

    df_all = pd.DataFrame(
        all, columns=["wav_filename", "wav_filesize", "transcript"], dtype=int
    )
    df_train = pd.DataFrame(
        a, columns=["wav_filename", "wav_filesize", "transcript"], dtype=int
    )
    df_test = pd.DataFrame(
        c, columns=["wav_filename", "wav_filesize", "transcript"], dtype=int
    )

    df_all.to_csv(
        target + "/timit_all.csv", sep=",", header=True, index=False, encoding="ascii"
    )
    df_train.to_csv(
        target + "/timit_train.csv", sep=",", header=True, index=False, encoding="ascii"
    )
    df_test.to_csv(
        target + "/timit_test.csv", sep=",", header=True, index=False, encoding="ascii"
    )
if __name__ == "__main__":
    # Entry point: argv[1] is the directory holding TIMIT-LDC93S1.tgz
    # (or an already-extracted TIMIT/ subdirectory).
    _preprocess_data(sys.argv[1])
    print("Completed")
| 0 |
coqui_public_repos/stt-model-manager | coqui_public_repos/stt-model-manager/src/App.jsx | import React, {Component} from 'react';
import io from 'socket.io-client';
const DOWNSAMPLING_WORKER = '/static/downsampling_worker.js';
class App extends Component {
constructor(props) {
super(props);
this.state = {
connected: false,
recording: false,
recordingStart: 0,
recordingTime: 0,
recognitionCount: 0,
recognitionOutput: [],
modelName: props.modelName
};
}
componentDidMount() {
this.socket = io();
this.socket.emit('start', this.props.modelName);
this.socket.on('connect', () => {
console.log('socket connected');
this.setState({connected: true});
});
this.socket.on('disconnect', () => {
console.log('socket disconnected');
this.setState({connected: false});
this.stopRecording();
});
this.socket.on('recognize', (results) => {
console.log('recognized:', results);
let {recognitionCount, recognitionOutput} = this.state;
recognitionOutput[0].text = results.text;
recognitionOutput.unshift({id: recognitionCount++, text: ""});
// Keep only 5 results visible
recognitionOutput = recognitionOutput.slice(0, 5);
this.setState({recognitionCount, recognitionOutput});
});
this.socket.on('intermediate', (results) => {
console.log('intermediate:', results);
let {recognitionOutput} = this.state;
recognitionOutput[0].text = results.text;
this.setState({recognitionOutput});
});
}
render() {
return (<div className="App">
<div>
<button className="rec-btn btn btn-outline-dark" disabled={!this.state.connected || this.state.recording} onClick={this.startRecording}>
Start Recording
</button>
<button className="rec-btn btn btn-outline-dark" disabled={!this.state.recording} onClick={this.stopRecording}>
Stop Recording
</button>
{this.renderTime()}
</div>
{this.renderRecognitionOutput()}
</div>);
}
renderTime() {
return (<span className="time-badge badge bg-secondary">
{(Math.round(this.state.recordingTime / 100) / 10).toFixed(1)}s
</span>);
}
renderRecognitionOutput() {
return (<ul className="stt-results-list">
{this.state.recognitionOutput.map((r) => {
return (<li key={r.id}>{r.text}</li>);
})}
</ul>)
}
createAudioProcessor(audioContext, audioSource) {
let processor = audioContext.createScriptProcessor(4096, 1, 1);
const sampleRate = audioSource.context.sampleRate;
let downsampler = new Worker(DOWNSAMPLING_WORKER);
downsampler.postMessage({command: "init", inputSampleRate: sampleRate});
downsampler.onmessage = (e) => {
if (this.socket.connected) {
this.socket.emit('stream-data', e.data.buffer);
}
};
processor.onaudioprocess = (event) => {
var data = event.inputBuffer.getChannelData(0);
downsampler.postMessage({command: "process", inputFrame: data});
};
processor.shutdown = () => {
processor.disconnect();
this.onaudioprocess = null;
};
processor.connect(audioContext.destination);
return processor;
}
startRecording = e => {
if (!this.state.recording) {
let {recognitionCount, recognitionOutput} = this.state;
recognitionOutput.unshift({id: recognitionCount++, text: ""});
this.setState({recognitionCount, recognitionOutput});
this.recordingInterval = setInterval(() => {
let recordingTime = new Date().getTime() - this.state.recordingStart;
this.setState({recordingTime});
}, 100);
this.updatesInterval = setInterval(() => {
this.socket.emit('stream-intermediate');
}, 400);
this.setState({
recording: true,
recordingStart: new Date().getTime(),
recordingTime: 0
}, () => {
this.startMicrophone();
});
}
};
startMicrophone() {
this.audioContext = new AudioContext();
const success = (stream) => {
console.log('started recording');
this.mediaStream = stream;
this.mediaStreamSource = this.audioContext.createMediaStreamSource(stream);
this.processor = this.createAudioProcessor(this.audioContext, this.mediaStreamSource);
this.mediaStreamSource.connect(this.processor);
};
const fail = (e) => {
console.error('recording failure', e);
};
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia({
video: false,
audio: true
})
.then(success)
.catch(fail);
}
else {
navigator.getUserMedia({
video: false,
audio: true
}, success, fail);
}
}
stopRecording = e => {
if (this.state.recording) {
let {recognitionCount, recognitionOutput} = this.state;
if (recognitionOutput[0].text.length === 0) {
recognitionOutput = recognitionOutput.slice(1);
recognitionCount--;
this.setState({recognitionCount, recognitionOutput});
}
if (this.socket.connected) {
this.socket.emit('stream-reset');
}
clearInterval(this.recordingInterval);
clearInterval(this.updatesInterval);
this.setState({
recording: false
}, () => {
this.stopMicrophone();
});
}
};
stopMicrophone() {
if (this.mediaStream) {
this.mediaStream.getTracks()[0].stop();
}
if (this.mediaStreamSource) {
this.mediaStreamSource.disconnect();
}
if (this.processor) {
this.processor.shutdown();
}
if (this.audioContext) {
this.audioContext.close();
}
}
}
export default App;
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/doc/index.rst | .. Coqui STT documentation main file, created by
sphinx-quickstart on Thu Feb 2 21:20:39 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
.. image:: https://raw.githubusercontent.com/coqui-ai/STT/main/images/coqui-STT-logo-green.png
:alt: Coqui STT logo and wordmark
**Coqui STT** (🐸STT) is an open-source deep-learning toolkit for training and deploying speech-to-text models.
🐸STT is battle tested in both production and research 🚀
.. toctree::
:maxdepth: 1
:caption: Quick Reference
DEPLOYMENT
TRAINING_INTRO
TRAINING_ADVANCED
BUILDING
Quickstart
^^^^^^^^^^
The fastest way to use a pre-trained 🐸STT model is with the 🐸STT model manager, a tool that lets you quickly test and demo models locally. You'll need Python 3.6, 3.7, 3.8 or 3.9:
.. code-block:: bash
# Create a virtual environment
$ python3 -m venv venv-stt
$ source venv-stt/bin/activate
# Install 🐸STT model manager
$ python -m pip install -U pip
$ python -m pip install coqui-stt-model-manager
# Run the model manager. A browser tab will open and you can then download and test models from the Model Zoo.
$ stt-model-manager
.. toctree::
:maxdepth: 1
:caption: API Reference
Error-Codes
C-API
DotNet-API
Java-API
NodeJS-API
Python-API
.. toctree::
:maxdepth: 1
:caption: Examples
Python-Examples
NodeJS-Examples
C-Examples
DotNet-Examples
Java-Examples
HotWordBoosting-Examples
Contributed-Examples
.. toctree::
:maxdepth: 1
:caption: Language Model
LANGUAGE_MODEL
.. include:: SUPPORT.rst
.. toctree::
:maxdepth: 1
:caption: STT Playbook
playbook/README
.. toctree::
:maxdepth: 1
:caption: Advanced topics
DECODER
Decoder-API
PARALLEL_OPTIMIZATION
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/script/convert.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_SCRIPT_CONVERT_H_
#define FST_SCRIPT_CONVERT_H_
#include <memory>
#include <string>
#include <utility>
#include <fst/register.h>
#include <fst/script/arg-packs.h>
#include <fst/script/fst-class.h>
namespace fst {
namespace script {
using ConvertInnerArgs = std::pair<const FstClass &, const string &>;
using ConvertArgs = WithReturnValue<FstClass *, ConvertInnerArgs>;
template <class Arc>
void Convert(ConvertArgs *args) {
  // Unpack the typed FST and the requested destination FST type name.
  const Fst<Arc> &fst = *(std::get<0>(args->args).GetFst<Arc>());
  const string &new_type = std::get<1>(args->args);
  // Convert() returns null on failure; propagate that as a null retval.
  std::unique_ptr<Fst<Arc>> result(Convert(fst, new_type));
  args->retval = result ? new FstClass(*result) : nullptr;
}

// Scripting-API entry point; dispatches on the FST's arc type.
FstClass *Convert(const FstClass &fst, const string &new_type);
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_CONVERT_H_
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/special/CMakeLists.txt | file(GLOB HEADER_FILES ../../include/fst/extensions/special/*.h)
message(STATUS "${HEADER_FILES}")
if(HAVE_BIN)
add_executable(fstspecial-bin
../../bin/fstconvert.cc
../../bin/fstconvert-main.cc
phi-fst.cc
rho-fst.cc
sigma-fst.cc
)
set_target_properties(fstspecial-bin PROPERTIES
FOLDER special/bin
OUTPUT_NAME fstspecial
)
target_link_libraries(fstspecial-bin
fstscript
fst
${CMAKE_DL_LIBS}
)
endif(HAVE_BIN)
add_library(fstspecial
phi-fst.cc
rho-fst.cc
sigma-fst.cc
${HEADER_FILES}
)
set_target_properties(fstspecial PROPERTIES
SOVERSION "${SOVERSION}"
FOLDER special
)
target_link_libraries(fstspecial
fst
)
# The fstspecial library is always built, but fstspecial-bin only exists when
# HAVE_BIN is set; installing it unconditionally breaks non-binary builds.
install(TARGETS fstspecial
  LIBRARY DESTINATION lib
  RUNTIME DESTINATION bin
  ARCHIVE DESTINATION lib
)
if(HAVE_BIN)
  install(TARGETS fstspecial-bin
    LIBRARY DESTINATION lib
    RUNTIME DESTINATION bin
    ARCHIVE DESTINATION lib
  )
endif(HAVE_BIN)
# Registers a loadable FST extension module: builds the library with the
# given sources, links it against fst, exports all symbols on Windows, and
# installs it into lib/fst.
function (add_module _name)
  add_library(${ARGV})
  if (TARGET ${_name})
    target_link_libraries(${_name} fst)
    set_target_properties(${_name}
      PROPERTIES WINDOWS_EXPORT_ALL_SYMBOLS true
      FOLDER special/modules
    )
  endif()

  install(TARGETS ${_name} LIBRARY DESTINATION lib/fst)
endfunction()
add_module(phi-fst MODULE phi-fst.cc)
add_module(rho-fst MODULE rho-fst.cc)
add_module(sigma-fst MODULE sigma-fst.cc)
| 0 |
coqui_public_repos/TTS/recipes | coqui_public_repos/TTS/recipes/ljspeech/README.md | # 🐸💬 TTS LJspeech Recipes
To run the recipes:
1. Download the LJSpeech dataset here either manually from [its official website](https://keithito.com/LJ-Speech-Dataset/) or using ```download_ljspeech.sh```.
2. Go to your desired model folder and run the training.
Running Python files. (Choose the desired GPU ID for your run and set ```CUDA_VISIBLE_DEVICES```)
```terminal
CUDA_VISIBLE_DEVICES="0" python train_modelX.py
```
Running bash scripts.
```terminal
bash run.sh
```
💡 Note that these runs are just templates to help you start training your first model. They are not optimized for the best
result. Double-check the configurations and feel free to share your experiments to find better parameters together 💪.
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/union-find.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Union-find algorithm for dense sets of non-negative integers, implemented
// using disjoint tree forests with rank heuristics and path compression.
#ifndef FST_UNION_FIND_H_
#define FST_UNION_FIND_H_
#include <stack>
#include <vector>
namespace fst {
// Union-Find algorithm for dense sets of non-negative integers.
template <class T>
class UnionFind {
 public:
  // Creates a disjoint set forest for the range [0; max); 'fail' is a value
  // indicating that an element hasn't been initialized using MakeSet(...).
  // The upper bound of the range can be reset (increased) using MakeSet(...).
  UnionFind(T max, T fail) : parent_(max, fail), rank_(max), fail_(fail) {}

  // Finds the representative of the set 'item' belongs to, performing path
  // compression if necessary. Returns 'fail_' for out-of-range or
  // uninitialized items.
  T FindSet(T item) {
    if (item >= parent_.size() || item == fail_ || parent_[item] == fail_) {
      return fail_;
    }
    auto *p = &parent_[item];
    // Walk up to the root, remembering each node on the path...
    for (; *p != item; item = *p, p = &parent_[item]) exec_stack_.push(p);
    // ...then point every node on the path directly at the root.
    for (; !exec_stack_.empty(); exec_stack_.pop()) *exec_stack_.top() = *p;
    return *p;
  }

  // Creates the (destructive) union of the sets x and y belong to.
  void Union(T x, T y) { Link(FindSet(x), FindSet(y)); }

  // Initialization of an element: creates a singleton set containing 'item'.
  // The range [0; max) is reset if item >= max.
  T MakeSet(T item) {
    if (item >= parent_.size()) {
      // New value in parent_ should be initialized to fail_.
      const auto nitem = item > 0 ? 2 * item : 2;
      parent_.resize(nitem, fail_);
      rank_.resize(nitem);
    }
    parent_[item] = item;
    return item;
  }

  // Initialization of all elements starting from 0 to max - 1 to distinct
  // sets.
  void MakeAllSet(T max) {
    parent_.resize(max);
    // Bug fix: keep rank_ the same size as parent_; previously only parent_
    // was resized, so Union() could index rank_ out of bounds after the
    // range was grown here.
    rank_.resize(max);
    for (T item = 0; item < max; ++item) parent_[item] = item;
  }

 private:
  // Links trees rooted in 'x' and 'y', attaching the lower-rank root to the
  // higher-rank one.
  void Link(T x, T y) {
    if (x == y) return;
    if (rank_[x] > rank_[y]) {
      parent_[y] = x;
    } else {
      parent_[x] = y;
      if (rank_[x] == rank_[y]) {
        ++rank_[y];
      }
    }
  }

  UnionFind(const UnionFind &) = delete;
  UnionFind &operator=(const UnionFind &) = delete;

  std::vector<T> parent_;       // Parent nodes.
  std::vector<int> rank_;       // Rank of an element = min. depth in tree.
  T fail_;                      // Value indicating lookup failure.
  std::stack<T *> exec_stack_;  // Used for path compression.
};
} // namespace fst
#endif // FST_UNION_FIND_H_
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/log.h | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Google-style logging declarations and inline definitions.
#ifndef FST_LIB_LOG_H_
#define FST_LIB_LOG_H_
#include <cassert>
#include <iostream>
#include <string>
#include <fst/types.h>
#include <fst/flags.h>
using std::string;
DECLARE_int32(v);
// Minimal Google-style log message: writes "<TYPE>: " on construction, the
// message body via stream(), and a newline on destruction.
class LogMessage {
 public:
  LogMessage(const string &type) : fatal_(type == "FATAL") {
    std::cerr << type << ": ";
  }
  // Terminates the line; a FATAL message exits the process.
  ~LogMessage() {
    std::cerr << std::endl;
    if(fatal_)
      exit(1);
  }
  std::ostream &stream() { return std::cerr; }

 private:
  bool fatal_;  // True iff constructed with type "FATAL".
};
#define LOG(type) LogMessage(#type).stream()
#define VLOG(level) if ((level) <= FLAGS_v) LOG(INFO)
// Checks
// Logs a FATAL message (terminating the process) when the checked condition
// is false; backs the CHECK* macros below.
inline void FstCheck(bool x, const char* expr,
                     const char *file, int line) {
  if (!x) {
    LOG(FATAL) << "Check failed: \"" << expr
               << "\" file: " << file
               << " line: " << line;
  }
}
#define CHECK(x) FstCheck(static_cast<bool>(x), #x, __FILE__, __LINE__)
#define CHECK_EQ(x, y) CHECK((x) == (y))
#define CHECK_LT(x, y) CHECK((x) < (y))
#define CHECK_GT(x, y) CHECK((x) > (y))
#define CHECK_LE(x, y) CHECK((x) <= (y))
#define CHECK_GE(x, y) CHECK((x) >= (y))
#define CHECK_NE(x, y) CHECK((x) != (y))
// Debug checks
#define DCHECK(x) assert(x)
#define DCHECK_EQ(x, y) DCHECK((x) == (y))
#define DCHECK_LT(x, y) DCHECK((x) < (y))
#define DCHECK_GT(x, y) DCHECK((x) > (y))
#define DCHECK_LE(x, y) DCHECK((x) <= (y))
#define DCHECK_GE(x, y) DCHECK((x) >= (y))
#define DCHECK_NE(x, y) DCHECK((x) != (y))
// Ports
#define ATTRIBUTE_DEPRECATED __attribute__((deprecated))
#endif // FST_LIB_LOG_H_
| 0 |
coqui_public_repos/STT-examples/net_framework/STTWPF | coqui_public_repos/STT-examples/net_framework/STTWPF/Properties/Resources.Designer.cs | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace STT.WPF.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// Returns the cached ResourceManager instance used by this class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("STT.WPF.Properties.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
}
}
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/script/encodemapper-class.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_SCRIPT_ENCODEMAPPER_CLASS_H_
#define FST_SCRIPT_ENCODEMAPPER_CLASS_H_
#include <memory>
#include <string>
#include <iostream>
#include <fst/fstlib.h>
#include <fst/script/arc-class.h>
#include <fst/script/fst-class.h>
// Scripting API support for EncodeMapper.
namespace fst {
namespace script {
// Virtual interface implemented by each concrete EncodeMapperClassImpl<A>.
class EncodeMapperImplBase {
public:
// Returns an encoded ArcClass.
virtual ArcClass operator()(const ArcClass &a) = 0;
virtual const string &ArcType() const = 0;
virtual uint32 Flags() const = 0;
virtual uint64 Properties(uint64 inprops) = 0;
virtual EncodeType Type() const = 0;
virtual const SymbolTable *InputSymbols() const = 0;
virtual const SymbolTable *OutputSymbols() const = 0;
virtual void SetInputSymbols(const SymbolTable *syms) = 0;
virtual void SetOutputSymbols(const SymbolTable *syms) = 0;
virtual const string &WeightType() const = 0;
virtual ~EncodeMapperImplBase() {}
};
// Templated implementation.
template <class Arc>
class EncodeMapperClassImpl : public EncodeMapperImplBase {
 public:
  EncodeMapperClassImpl(uint32 flags, EncodeType type)
      : encoder_(flags, type) {}

  ArcClass operator()(const ArcClass &a) final;

  const string &ArcType() const final { return Arc::Type(); }

  uint32 Flags() const final { return encoder_.Flags(); }

  uint64 Properties(uint64 inprops) final {
    return encoder_.Properties(inprops);
  }

  EncodeType Type() const final { return encoder_.Type(); }

  const SymbolTable *InputSymbols() const final {
    return encoder_.InputSymbols();
  }

  const SymbolTable *OutputSymbols() const final {
    return encoder_.OutputSymbols();
  }

  void SetInputSymbols(const SymbolTable *syms) final {
    encoder_.SetInputSymbols(syms);
  }

  void SetOutputSymbols(const SymbolTable *syms) final {
    encoder_.SetOutputSymbols(syms);
  }

  const string &WeightType() const final { return Arc::Weight::Type(); }

  ~EncodeMapperClassImpl() override {}

  // Bug fix: the const accessor must return a pointer-to-const. Returning a
  // mutable EncodeMapper<Arc>* to the member of a const object is ill-formed
  // and fails to compile when this overload is instantiated.
  const EncodeMapper<Arc> *GetImpl() const { return &encoder_; }

  EncodeMapper<Arc> *GetImpl() { return &encoder_; }

 private:
  EncodeMapper<Arc> encoder_;
};
// This is returned by value because it is very likely to undergo return-value
// optimization.
template <class Arc>
inline ArcClass EncodeMapperClassImpl<Arc>::operator()(const ArcClass &a) {
Arc arc(a.ilabel, a.olabel, *(a.weight.GetWeight<typename Arc::Weight>()),
a.nextstate);
return ArcClass(encoder_(arc));
}
class EncodeMapperClass;
using InitEncodeMapperClassArgs =
std::tuple<uint32, EncodeType, EncodeMapperClass *>;
class EncodeMapperClass {
public:
EncodeMapperClass(const string &arc_type, uint32 flags, EncodeType type);
template <class Arc>
EncodeMapperClass(uint32 flags, EncodeType type)
: impl_(new EncodeMapperClassImpl<Arc>(flags, type)) {}
ArcClass operator()(const ArcClass &arc) { return (*impl_)(arc); }
const string &ArcType() const { return impl_->ArcType(); }
uint32 Flags() const { return impl_->Flags(); }
uint64 Properties(uint64 inprops) { return impl_->Properties(inprops); }
EncodeType Type() const { return impl_->Type(); }
const SymbolTable *InputSymbols() const { return impl_->InputSymbols(); }
const SymbolTable *OutputSymbols() const { return impl_->OutputSymbols(); }
void SetInputSymbols(const SymbolTable *syms) {
impl_->SetInputSymbols(syms);
}
void SetOutputSymbols(const SymbolTable *syms) {
impl_->SetOutputSymbols(syms);
}
const string &WeightType() const { return impl_->WeightType(); }
template <class Arc>
friend void InitEncodeMapperClass(InitEncodeMapperClassArgs *args);
// Naturally, this exists in non-const and const forms. Encoding arcs or FSTs
// mutates the underlying encoder; decoding them does not.
template <class Arc>
EncodeMapper<Arc> *GetEncodeMapper() {
if (Arc::Type() != ArcType()) {
return nullptr;
} else {
auto *typed_impl = static_cast<EncodeMapperClassImpl<Arc> *>(impl_.get());
return typed_impl->GetImpl();
}
}
template <class Arc>
const EncodeMapper<Arc> *GetEncodeMapper() const {
if (Arc::Type() != ArcType()) {
return nullptr;
} else {
auto *typed_impl = static_cast<EncodeMapperClassImpl<Arc> *>(impl_.get());
return typed_impl->GetImpl();
}
}
private:
std::unique_ptr<EncodeMapperImplBase> impl_;
};
template <class Arc>
void InitEncodeMapperClass(InitEncodeMapperClassArgs *args) {
std::get<2>(*args)->impl_.reset(
new EncodeMapperClassImpl<Arc>(std::get<0>(*args), std::get<1>(*args)));
}
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_ENCODEMAPPER_CLASS_H_
| 0 |
coqui_public_repos | coqui_public_repos/TTS/Makefile | .DEFAULT_GOAL := help
.PHONY: test system-deps dev-deps deps style lint install help docs
help:
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
target_dirs := tests TTS notebooks recipes
test_all: ## run tests and don't stop on an error.
nose2 --with-coverage --coverage TTS tests
./run_bash_tests.sh
test: ## run tests.
nose2 -F -v -B --with-coverage --coverage TTS tests
test_vocoder: ## run vocoder tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.vocoder_tests
test_tts: ## run tts tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.tts_tests
test_tts2: ## run tts tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.tts_tests2
test_xtts:
nose2 -F -v -B --with-coverage --coverage TTS tests.xtts_tests
test_aux: ## run aux tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.aux_tests
./run_bash_tests.sh
test_zoo: ## run zoo tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.zoo_tests
inference_tests: ## run inference tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.inference_tests
data_tests: ## run data tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.data_tests
test_text: ## run text tests.
nose2 -F -v -B --with-coverage --coverage TTS tests.text_tests
test_failed: ## only run tests failed the last time.
nose2 -F -v -B --with-coverage --coverage TTS tests
style: ## update code style.
black ${target_dirs}
isort ${target_dirs}
lint: ## run pylint linter.
pylint ${target_dirs}
black ${target_dirs} --check
isort ${target_dirs} --check-only
system-deps: ## install linux system deps
sudo apt-get install -y libsndfile1-dev
dev-deps: ## install development deps
pip install -r requirements.dev.txt
doc-deps: ## install docs dependencies
pip install -r docs/requirements.txt
build-docs: ## build the docs
cd docs && make clean && make build
hub-deps: ## install deps for torch hub use
pip install -r requirements.hub.txt
deps: ## install 🐸 requirements.
pip install -r requirements.txt
install: ## install 🐸 TTS for development.
pip install -e .[all]
docs: ## build the docs
$(MAKE) -C docs clean && $(MAKE) -C docs html
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/bin/run-ci-ldc93s1-vorbis.sh | #!/bin/sh
set -xe
if [ ! -f train.py ]; then
echo "Please make sure you run this from STT's top level directory."
exit 1
fi;
if [ ! -f "data/smoke_test/ldc93s1.csv" ]; then
echo "Downloading and preprocessing LDC93S1 example data, saving in ./data/smoke_test."
python -u bin/import_ldc93s1.py ./data/smoke_test
fi;
checkpoint_dir="$HOME/.local/share/stt/ldc93s1"
# Force only one visible device because we have a single-sample dataset
# and when trying to run on multiple devices (like GPUs), this will break
export CUDA_VISIBLE_DEVICES=0
python -m coqui_stt_training.train \
--alphabet_config_path "data/alphabet.txt" \
--show_progressbar false \
--train_files data/smoke_test/ldc93s1_vorbis.csv \
--test_files data/smoke_test/ldc93s1_vorbis.csv \
--train_batch_size 1 \
--test_batch_size 1 \
--n_hidden 100 \
--epochs 200 \
--checkpoint_dir "$checkpoint_dir" \
"$@"
| 0 |
coqui_public_repos/STT-models/tatar/itml | coqui_public_repos/STT-models/tatar/itml/v0.1.0/alphabet.txt |
а
б
в
г
д
е
ж
з
и
й
к
л
м
н
о
п
р
с
т
у
ф
х
ц
ч
ш
щ
ъ
ы
ь
э
ю
я
ё
җ
ң
ү
һ
ә
ө
| 0 |
coqui_public_repos/Trainer/trainer | coqui_public_repos/Trainer/trainer/logging/mlflow_logger.py | import os
import shutil
import tempfile
import traceback
import soundfile as sf
import torch
from trainer.logging.base_dash_logger import BaseDashboardLogger
from trainer.trainer_utils import is_mlflow_available
from trainer.utils.distributed import rank_zero_only
if is_mlflow_available():
from mlflow.tracking import MlflowClient
from mlflow.tracking.context.registry import resolve_tags
from mlflow.utils.mlflow_tags import MLFLOW_RUN_NAME
# pylint: skip-file
class MLFlowLogger(BaseDashboardLogger):
    def __init__(
        self,
        log_uri: str,
        model_name: str,
        tags: str = None,
    ):
        """Create (or reuse) the MLflow experiment `model_name` and start a run.

        Args:
            log_uri: MLflow tracking URI (local path or server URI).
            model_name: Experiment name used to group runs.
            tags: Optional run tags, forwarded to resolve_tags() and also
                stored under the MLFLOW_RUN_NAME experiment tag when given.
                NOTE(review): annotated as str, but resolve_tags() normally
                takes a dict — confirm the expected type with callers.
        """
        self.model_name = model_name
        self.client = MlflowClient(tracking_uri=os.path.join(log_uri))

        # Reuse the experiment if it already exists, otherwise create it.
        experiment = self.client.get_experiment_by_name(model_name)
        if experiment is None:
            self.experiment_id = self.client.create_experiment(name=model_name)
        else:
            self.experiment_id = experiment.experiment_id

        if tags is not None:
            self.client.set_experiment_tag(self.experiment_id, MLFLOW_RUN_NAME, tags)
        run = self.client.create_run(experiment_id=self.experiment_id, tags=resolve_tags(tags))
        self.run_id = run.info.run_id
def model_weights(self, model, step):
layer_num = 1
for name, param in model.named_parameters():
if param.numel() == 1:
self.client.log_metric("layer{}-{}/value".format(layer_num, name), param.max(), step)
else:
self.client.log_metric("layer{}-{}/max".format(layer_num, name), param.max(), step)
self.client.log_metric("layer{}-{}/min".format(layer_num, name), param.min(), step)
self.client.log_metric("layer{}-{}/mean".format(layer_num, name), param.mean(), step)
self.client.log_metric("layer{}-{}/std".format(layer_num, name), param.std(), step)
# MlFlow does not support histograms
# self.client.add_histogram("layer{}-{}/param".format(layer_num, name), param, step)
# self.client.add_histogram("layer{}-{}/grad".format(layer_num, name), param.grad, step)
layer_num += 1
    def add_config(self, config):
        # Store the full model config as an HTML-wrapped text artifact.
        self.add_text("model-config", f"<pre>{config.to_json()}</pre>", 0)
def add_scalar(self, title, value, step):
self.client.log_metric(self.run_id, title, value, step)
    def add_text(self, title, text, step):
        # Persist text as an artifact at "<title>/<step>.txt".
        self.client.log_text(self.run_id, text, "{}/{}.txt".format(title, step))
def add_figure(self, title, figure, step):
self.client.log_figure(figure, "{}/{}.png".format(title, step))
    def add_artifact(self, file_or_dir, name, artifact_type, aliases=None):  # pylint: disable=W0613, R0201
        # `name`, `artifact_type` and `aliases` are unused here; they are kept
        # for interface compatibility with the other dashboard loggers.
        self.client.log_artifacts(self.run_id, file_or_dir)
    def add_audio(self, title, audio, step, sample_rate):
        # NOTE(review): MlflowClient exposes no log_audio API in most mlflow
        # releases — confirm the pinned mlflow version provides it (compare
        # add_audios below, which writes a wav and logs it as an artifact).
        self.client.log_audio(self.run_id, audio, "{}/{}.wav".format(title, step), sample_rate)
@rank_zero_only
def add_scalars(self, scope_name, stats, step):
for key, value in stats.items():
if torch.is_tensor(value):
value = value.item()
self.client.log_metric(self.run_id, "{}-{}".format(scope_name, key), value, step)
    @rank_zero_only
    def add_figures(self, scope_name, figures, step):
        """Log a dict of figures as ``<scope_name>/<key>/<step>.png`` artifacts."""
        for key, value in figures.items():
            self.client.log_figure(self.run_id, value, "{}/{}/{}.png".format(scope_name, key, step))
@rank_zero_only
def add_audios(self, scope_name, audios, step, sample_rate):
for key, value in audios.items():
if value.dtype == "float16":
value = value.astype("float32")
try:
tmp_audio_path = tempfile.NamedTemporaryFile(suffix=".wav")
sf.write(tmp_audio_path, value, sample_rate)
self.client.log_artifact(
self.run_id,
tmp_audio_path,
"{}/{}/{}.wav".format(scope_name, key, step),
)
shutil.rmtree(tmp_audio_path)
except RuntimeError:
traceback.print_exc()
    def train_step_stats(self, step, stats):
        # Tag the run's Mode as "training", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "training")
        super().train_step_stats(step, stats)
    def train_epoch_stats(self, step, stats):
        # Tag the run's Mode as "training", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "training")
        super().train_epoch_stats(step, stats)
    def train_figures(self, step, figures):
        # Tag the run's Mode as "training", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "training")
        super().train_figures(step, figures)
    def train_audios(self, step, audios, sample_rate):
        # Tag the run's Mode as "training", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "training")
        super().train_audios(step, audios, sample_rate)
    def eval_stats(self, step, stats):
        # Tag the run's Mode as "evaluation", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "evaluation")
        super().eval_stats(step, stats)
    def eval_figures(self, step, figures):
        # Tag the run's Mode as "evaluation", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "evaluation")
        super().eval_figures(step, figures)
    def eval_audios(self, step, audios, sample_rate):
        # Tag the run's Mode as "evaluation", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "evaluation")
        super().eval_audios(step, audios, sample_rate)
    def test_audios(self, step, audios, sample_rate):
        # Tag the run's Mode as "test", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "test")
        super().test_audios(step, audios, sample_rate)
    def test_figures(self, step, figures):
        # Tag the run's Mode as "test", then defer to the base logger.
        self.client.set_tag(self.run_id, "Mode", "test")
        super().test_figures(step, figures)
    def flush(self):
        """No-op; kept to satisfy the logger interface."""
        pass
@rank_zero_only
def finish(self):
super().finalize(status)
status = "FINISHED" if status == "success" else status
if self.client.get_run(self.run_id):
self.client.set_terminated(self.run_id, status)
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/special/rho-fst.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/extensions/special/rho-fst.h>
#include <fst/fst.h>
// Flag: which arc label is interpreted as the rho ("rest") transition.
DEFINE_int64(rho_fst_rho_label, 0,
             "Label of transitions to be interpreted as rho ('rest') "
             "transitions");

// Flag: whether a matched rho arc rewrites both input and output labels.
DEFINE_string(rho_fst_rewrite_mode, "auto",
              "Rewrite both sides when matching? One of:"
              " \"auto\" (rewrite iff acceptor), \"always\", \"never\"");

namespace fst {

// Registered FST type names for the three rho variants.
const char rho_fst_type[] = "rho";
const char input_rho_fst_type[] = "input_rho";
const char output_rho_fst_type[] = "output_rho";

// Static registration: makes each RhoFst variant constructible by name via
// the generic FST registry for the standard, log, and log64 arc types.
static FstRegisterer<StdRhoFst> RhoFst_StdArc_registerer;
static FstRegisterer<LogRhoFst> RhoFst_LogArc_registerer;
static FstRegisterer<Log64RhoFst> RhoFst_Log64Arc_registerer;

static FstRegisterer<StdInputRhoFst> InputRhoFst_StdArc_registerer;
static FstRegisterer<LogInputRhoFst> InputRhoFst_LogArc_registerer;
static FstRegisterer<Log64InputRhoFst> InputRhoFst_Log64Arc_registerer;

static FstRegisterer<StdOutputRhoFst> OutputRhoFst_StdArc_registerer;
static FstRegisterer<LogOutputRhoFst> OutputRhoFst_LogArc_registerer;
static FstRegisterer<Log64OutputRhoFst> OutputRhoFst_Log64Arc_registerer;

}  // namespace fst
| 0 |
coqui_public_repos/STT/native_client/kenlm/lm | coqui_public_repos/STT/native_client/kenlm/lm/common/print.cc | #include "print.hh"
#include "ngram_stream.hh"
#include "../../util/file_stream.hh"
#include "../../util/file.hh"
#include "../../util/mmap.hh"
#include "../../util/scoped.hh"
#include <sstream>
#include <cstring>
namespace lm {
// Maps the whole vocabulary file into memory and records a pointer to each
// NUL-terminated word, enabling word lookup by index later on.
VocabReconstitute::VocabReconstitute(int fd) {
  uint64_t size = util::SizeOrThrow(fd);
  util::MapRead(util::POPULATE_OR_READ, fd, 0, size, memory_);
  const char *const start = static_cast<const char*>(memory_.get());
  const char *i;
  // Each word is a NUL-terminated string laid out back to back.
  for (i = start; i != start + size; i += strlen(i) + 1) {
    map_.push_back(i);
  }
  // Last one for LookupPiece.
  map_.push_back(i);
}
namespace {

// Writes the probability and the n-gram's words (tab then space-separated)
// for the current stream entry; the caller appends backoff/newline.
template <class Payload> void PrintLead(const VocabReconstitute &vocab, ProxyStream<Payload> &stream, util::FileStream &out) {
  out << stream->Value().prob << '\t' << vocab.Lookup(*stream->begin());
  for (const WordIndex *i = stream->begin() + 1; i != stream->end(); ++i) {
    out << ' ' << vocab.Lookup(*i);
  }
}

} // namespace
// Streams the n-gram chains out as a standard ARPA file: a \data\ header with
// per-order counts, one "\<order>-grams:" section per order, then \end\.
void PrintARPA::Run(const util::stream::ChainPositions &positions) {
  VocabReconstitute vocab(vocab_fd_);
  util::FileStream out(out_fd_);
  out << "\\data\\\n";
  for (size_t i = 0; i < positions.size(); ++i) {
    out << "ngram " << (i+1) << '=' << counts_[i] << '\n';
  }
  out << '\n';
  // All orders below the maximum carry a backoff weight after the words.
  for (unsigned order = 1; order < positions.size(); ++order) {
    out << "\\" << order << "-grams:" << '\n';
    for (ProxyStream<NGram<ProbBackoff> > stream(positions[order - 1], NGram<ProbBackoff>(NULL, order)); stream; ++stream) {
      PrintLead(vocab, stream, out);
      out << '\t' << stream->Value().backoff << '\n';
    }
    out << '\n';
  }
  // Highest order: probability only, no backoff field.
  out << "\\" << positions.size() << "-grams:" << '\n';
  for (ProxyStream<NGram<Prob> > stream(positions.back(), NGram<Prob>(NULL, positions.size())); stream; ++stream) {
    PrintLead(vocab, stream, out);
    out << '\n';
  }
  out << '\n';
  out << "\\end\\\n";
}
} // namespace lm
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/script/relabel.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_SCRIPT_RELABEL_H_
#define FST_SCRIPT_RELABEL_H_
#include <algorithm>
#include <tuple>
#include <utility>
#include <vector>
#include <fst/relabel.h>
#include <fst/script/fst-class.h>
namespace fst {
namespace script {
using RelabelArgs1 = std::tuple<MutableFstClass *, const SymbolTable *,
const SymbolTable *, const string &, bool,
const SymbolTable *, const SymbolTable *,
const string &, bool>;
// Script-API shim: unpacks the type-erased argument tuple and forwards to the
// symbol-table-based Relabel overload for the concrete arc type.
template <class Arc>
void Relabel(RelabelArgs1 *args) {
  MutableFst<Arc> *ofst = std::get<0>(*args)->GetMutableFst<Arc>();
  Relabel(ofst, std::get<1>(*args), std::get<2>(*args), std::get<3>(*args),
          std::get<4>(*args), std::get<5>(*args), std::get<6>(*args),
          std::get<7>(*args), std::get<8>(*args));
}
using LabelPair = std::pair<int64_t, int64_t>;
using RelabelArgs2 = std::tuple<MutableFstClass *,
const std::vector<LabelPair> &,
const std::vector<LabelPair> &>;
// Script-API shim: converts the generic (int64) label pairs to the arc's
// label type, then forwards to the pair-based Relabel overload.
template <class Arc>
void Relabel(RelabelArgs2 *args) {
  MutableFst<Arc> *ofst = std::get<0>(*args)->GetMutableFst<Arc>();
  using LabelPair = std::pair<typename Arc::Label, typename Arc::Label>;
  // In case the MutableFstClass::Label is not the same as Arc::Label,
  // make a copy.
  std::vector<LabelPair> typed_ipairs(std::get<1>(*args).size());
  std::copy(std::get<1>(*args).begin(), std::get<1>(*args).end(),
            typed_ipairs.begin());
  std::vector<LabelPair> typed_opairs(std::get<2>(*args).size());
  std::copy(std::get<2>(*args).begin(), std::get<2>(*args).end(),
            typed_opairs.begin());
  Relabel(ofst, typed_ipairs, typed_opairs);
}
void Relabel(MutableFstClass *ofst,
const SymbolTable *old_isymbols, const SymbolTable *new_isymbols,
const string &unknown_isymbol, bool attach_new_isymbols,
const SymbolTable *old_osymbols, const SymbolTable *new_osymbols,
const string &unknown_osymbol, bool attach_new_osymbols);
void Relabel(MutableFstClass *ofst, const std::vector<LabelPair> &ipairs,
const std::vector<LabelPair> &opairs);
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_RELABEL_H_
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/bin/fstshortestdistance-main.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Find shortest distances in an FST.
#include <cstring>
#include <memory>
#include <string>
#include <vector>
#include <fst/flags.h>
#include <fst/log.h>
#include <fst/script/getters.h>
#include <fst/script/shortest-distance.h>
#include <fst/script/text-io.h>
DECLARE_bool(reverse);
DECLARE_double(delta);
DECLARE_int64(nstate);
DECLARE_string(queue_type);
// CLI entry point: reads an FST, computes shortest distances from the initial
// state (or shortest distances to the final states with --reverse), and
// writes them as textual state/weight potentials.
int fstshortestdistance_main(int argc, char **argv) {
  namespace s = fst::script;
  using fst::script::FstClass;
  using fst::script::WeightClass;
  using fst::QueueType;
  using fst::AUTO_QUEUE;

  string usage = "Finds shortest distance(s) in an FST.\n\n  Usage: ";
  usage += argv[0];
  usage += " [in.fst [distance.txt]]\n";

  std::set_new_handler(FailedNewHandler);
  SET_FLAGS(usage.c_str(), &argc, &argv, true);
  if (argc > 3) {
    ShowUsage();
    return 1;
  }

  // "-" (or no argument) means standard input/output.
  string in_name = (argc > 1 && (strcmp(argv[1], "-") != 0)) ? argv[1] : "";
  string out_name = argc > 2 ? argv[2] : "";

  std::unique_ptr<FstClass> ifst(FstClass::Read(in_name));
  if (!ifst) return 1;

  std::vector<WeightClass> distance;

  QueueType queue_type;
  if (!s::GetQueueType(FLAGS_queue_type, &queue_type)) {
    LOG(ERROR) << argv[0]
               << ": Unknown or unsupported queue type: " << FLAGS_queue_type;
    return 1;
  }

  // The reverse computation only supports the automatic queue discipline.
  if (FLAGS_reverse && queue_type != AUTO_QUEUE) {
    LOG(ERROR) << argv[0] << ": Can't use non-default queue with reverse";
    return 1;
  }

  if (FLAGS_reverse) {
    s::ShortestDistance(*ifst, &distance, FLAGS_reverse, FLAGS_delta);
  } else {
    const s::ShortestDistanceOptions opts(queue_type, s::ANY_ARC_FILTER,
                                          FLAGS_nstate, FLAGS_delta);
    s::ShortestDistance(*ifst, &distance, opts);
  }

  return !s::WritePotentials(out_name, distance);
}
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/node-gyp-package.sh | #!/bin/bash
set -xe
source $(dirname "$0")/tc-tests-utils.sh
mkdir -p ${TASKCLUSTER_ARTIFACTS} || true
cd $DS_ROOT_TASK/node-gyp-cache/ && tar -czf ${TASKCLUSTER_ARTIFACTS}/node-gyp-cache.tar.gz .
| 0 |
coqui_public_repos/stt-model-manager | coqui_public_repos/stt-model-manager/src/index.css | body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
monospace;
}
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/signed-log-weight.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// LogWeight along with sign information that represents the value X in the
// linear domain as <sign(X), -ln(|X|)>
//
// The sign is a TropicalWeight:
// positive, TropicalWeight.Value() > 0.0, recommended value 1.0
// negative, TropicalWeight.Value() <= 0.0, recommended value -1.0
#ifndef FST_SIGNED_LOG_WEIGHT_H_
#define FST_SIGNED_LOG_WEIGHT_H_
#include <cstdlib>
#include <fst/float-weight.h>
#include <fst/pair-weight.h>
#include <fst/product-weight.h>
namespace fst {
// A weight in the signed-log semiring: Value1 (TropicalWeight) encodes the
// sign (> 0.0 means positive), Value2 (LogWeightTpl) encodes -ln(|X|).
template <class T>
class SignedLogWeightTpl : public PairWeight<TropicalWeight, LogWeightTpl<T>> {
 public:
  using X1 = TropicalWeight;   // Sign component.
  using X2 = LogWeightTpl<T>;  // Magnitude component, in -log space.
  using ReverseWeight = SignedLogWeightTpl;

  using PairWeight<X1, X2>::Value1;
  using PairWeight<X1, X2>::Value2;

  SignedLogWeightTpl() : PairWeight<X1, X2>() {}

  SignedLogWeightTpl(const SignedLogWeightTpl &w) : PairWeight<X1, X2>(w) {}

  explicit SignedLogWeightTpl(const PairWeight<X1, X2> &w)
      : PairWeight<X1, X2>(w) {}

  SignedLogWeightTpl(const X1 &x1, const X2 &x2) : PairWeight<X1, X2>(x1, x2) {}

  // Zero is represented as positive sign with log-magnitude Zero.
  static const SignedLogWeightTpl &Zero() {
    static const SignedLogWeightTpl zero(X1(1.0), X2::Zero());
    return zero;
  }

  // One is positive sign with log-magnitude One (0.0).
  static const SignedLogWeightTpl &One() {
    static const SignedLogWeightTpl one(X1(1.0), X2::One());
    return one;
  }

  static const SignedLogWeightTpl &NoWeight() {
    static const SignedLogWeightTpl no_weight(X1(1.0), X2::NoWeight());
    return no_weight;
  }

  static const string &Type() {
    static const string *const type =
        new string("signed_log_" + X1::Type() + "_" + X2::Type());
    return *type;
  }

  SignedLogWeightTpl Quantize(float delta = kDelta) const {
    return SignedLogWeightTpl(PairWeight<X1, X2>::Quantize(delta));
  }

  ReverseWeight Reverse() const {
    return SignedLogWeightTpl(PairWeight<X1, X2>::Reverse());
  }

  bool Member() const { return PairWeight<X1, X2>::Member(); }

  // Neither idempotent nor path.
  static constexpr uint64 Properties() {
    return kLeftSemiring | kRightSemiring | kCommutative;
  }

  // Hash over the *normalized* sign (Zero is treated as positive so that +0
  // and -0 hash identically) combined with the magnitude's hash.
  size_t Hash() const {
    size_t h1;
    if (Value2() == X2::Zero() || Value1().Value() > 0.0) {
      h1 = TropicalWeight(1.0).Hash();
    } else {
      h1 = TropicalWeight(-1.0).Hash();
    }
    size_t h2 = Value2().Hash();
    static constexpr int lshift = 5;
    static constexpr int rshift = CHAR_BIT * sizeof(size_t) - 5;
    return h1 << lshift ^ h1 >> rshift ^ h2;
  }
};
// Semiring plus in the signed-log domain. Magnitudes are stored as -ln|x|,
// so a *smaller* stored value means a *larger* magnitude. Same-sign operands
// add magnitudes (LogPosExp); opposite signs subtract (LogNegExp), with the
// result taking the sign of the larger-magnitude operand.
template <class T>
inline SignedLogWeightTpl<T> Plus(const SignedLogWeightTpl<T> &w1,
                                  const SignedLogWeightTpl<T> &w2) {
  using X1 = TropicalWeight;
  using X2 = LogWeightTpl<T>;
  if (!w1.Member() || !w2.Member()) return SignedLogWeightTpl<T>::NoWeight();
  const auto s1 = w1.Value1().Value() > 0.0;
  const auto s2 = w2.Value1().Value() > 0.0;
  const bool equal = (s1 == s2);
  const auto f1 = w1.Value2().Value();
  const auto f2 = w2.Value2().Value();
  if (f1 == FloatLimits<T>::PosInfinity()) {  // w1 is Zero.
    return w2;
  } else if (f2 == FloatLimits<T>::PosInfinity()) {  // w2 is Zero.
    return w1;
  } else if (f1 == f2) {
    if (equal) {
      // Equal magnitudes, same sign: result is twice the value.
      return SignedLogWeightTpl<T>(X1(w1.Value1()), X2(f2 - log(2.0F)));
    } else {
      // Equal magnitudes, opposite sign: exact cancellation.
      return SignedLogWeightTpl<T>::Zero();
    }
  } else if (f1 > f2) {  // |w2| dominates.
    if (equal) {
      return SignedLogWeightTpl<T>(X1(w1.Value1()),
                                   X2(f2 - internal::LogPosExp(f1 - f2)));
    } else {
      return SignedLogWeightTpl<T>(X1(w2.Value1()),
                                   X2((f2 - internal::LogNegExp(f1 - f2))));
    }
  } else {  // |w1| dominates.
    if (equal) {
      return SignedLogWeightTpl<T>(X1(w2.Value1()),
                                   X2((f1 - internal::LogPosExp(f2 - f1))));
    } else {
      return SignedLogWeightTpl<T>(X1(w1.Value1()),
                                   X2((f1 - internal::LogNegExp(f2 - f1))));
    }
  }
}
// Subtraction in the signed-log semiring: w1 - w2 == w1 + (-w2), where
// negation just flips the sign component and keeps the magnitude.
template <class T>
inline SignedLogWeightTpl<T> Minus(const SignedLogWeightTpl<T> &w1,
                                   const SignedLogWeightTpl<T> &w2) {
  const SignedLogWeightTpl<T> negated(TropicalWeight(-w2.Value1().Value()),
                                      w2.Value2());
  return Plus(w1, negated);
}
// Semiring times: multiply magnitudes by adding their -log values; the
// product is positive exactly when the operand signs agree.
template <class T>
inline SignedLogWeightTpl<T> Times(const SignedLogWeightTpl<T> &w1,
                                   const SignedLogWeightTpl<T> &w2) {
  using X2 = LogWeightTpl<T>;
  if (!w1.Member() || !w2.Member()) return SignedLogWeightTpl<T>::NoWeight();
  const bool same_sign =
      (w1.Value1().Value() > 0.0) == (w2.Value1().Value() > 0.0);
  const X2 magnitude(w1.Value2().Value() + w2.Value2().Value());
  return SignedLogWeightTpl<T>(TropicalWeight(same_sign ? 1.0 : -1.0),
                               magnitude);
}
// Semiring division: magnitudes subtract in -log space; the sign is positive
// iff the operand signs agree. Division by Zero yields a bad number
// (NoWeight-like magnitude); Zero divided by a nonzero weight stays Zero.
template <class T>
inline SignedLogWeightTpl<T> Divide(const SignedLogWeightTpl<T> &w1,
                                    const SignedLogWeightTpl<T> &w2,
                                    DivideType typ = DIVIDE_ANY) {
  using X2 = LogWeightTpl<T>;
  if (!w1.Member() || !w2.Member()) return SignedLogWeightTpl<T>::NoWeight();
  const auto s1 = w1.Value1().Value() > 0.0;
  const auto s2 = w2.Value1().Value() > 0.0;
  const auto f1 = w1.Value2().Value();
  const auto f2 = w2.Value2().Value();
  if (f2 == FloatLimits<T>::PosInfinity()) {  // Division by Zero.
    return SignedLogWeightTpl<T>(TropicalWeight(1.0),
                                 X2(FloatLimits<T>::NumberBad()));
  } else if (f1 == FloatLimits<T>::PosInfinity()) {  // Zero numerator.
    return SignedLogWeightTpl<T>(TropicalWeight(1.0),
                                 X2(FloatLimits<T>::PosInfinity()));
  } else if (s1 == s2) {
    return SignedLogWeightTpl<T>(TropicalWeight(1.0), X2(f1 - f2));
  } else {
    return SignedLogWeightTpl<T>(TropicalWeight(-1.0), X2(f1 - f2));
  }
}
template <class T>
inline bool ApproxEqual(const SignedLogWeightTpl<T> &w1,
const SignedLogWeightTpl<T> &w2, float delta = kDelta) {
const auto s1 = w1.Value1().Value() > 0.0;
const auto s2 = w2.Value1().Value() > 0.0;
if (s1 == s2) {
return ApproxEqual(w1.Value2(), w2.Value2(), delta);
} else {
return w1.Value2() == LogWeightTpl<T>::Zero() &&
w2.Value2() == LogWeightTpl<T>::Zero();
}
}
// Exact equality: matching signs compare magnitudes; opposite signs are
// equal only when both weights are Zero (so +0 == -0).
template <class T>
inline bool operator==(const SignedLogWeightTpl<T> &w1,
                       const SignedLogWeightTpl<T> &w2) {
  const auto s1 = w1.Value1().Value() > 0.0;
  const auto s2 = w2.Value1().Value() > 0.0;
  if (s1 == s2) {
    return w1.Value2() == w2.Value2();
  } else {
    return (w1.Value2() == LogWeightTpl<T>::Zero()) &&
           (w2.Value2() == LogWeightTpl<T>::Zero());
  }
}
// Single-precision signed-log weight.
using SignedLogWeight = SignedLogWeightTpl<float>;
// Double-precision signed-log weight.
using SignedLog64Weight = SignedLogWeightTpl<double>;
// Guards conversions from signed-log to unsigned weight types: a negative
// sign cannot be represented in the target, so log an error and return false.
template <class W1, class W2>
bool SignedLogConvertCheck(W1 weight) {
  if (weight.Value1().Value() < 0.0) {
    FSTERROR() << "WeightConvert: Can't convert weight " << weight
               << " from " << W1::Type() << " to " << W2::Type();
    return false;
  }
  return true;
}
// Specialization using the Kahan compensated summation
// Adder specialization accumulating signed-log weights with Kahan
// compensated summation: tracks the running sign, the unsigned -log sum, and
// a compensation term to reduce floating-point error over many additions.
template <class T>
class Adder<SignedLogWeightTpl<T>> {
 public:
  using Weight = SignedLogWeightTpl<T>;
  using X1 = TropicalWeight;
  using X2 = LogWeightTpl<T>;

  explicit Adder(Weight w = Weight::Zero())
      : ssum_(w.Value1().Value() > 0.0),
        sum_(w.Value2().Value()),
        c_(0.0) { }

  // Folds w into the running sum, mirroring the case analysis of Plus():
  // same-sign magnitudes add (KahanLogSum), opposite-sign subtract
  // (KahanLogDiff), with the sign following the dominant magnitude.
  Weight Add(const Weight &w) {
    const auto sw = w.Value1().Value() > 0.0;
    const auto f = w.Value2().Value();
    const bool equal = (ssum_ == sw);
    if (!Sum().Member() || f == FloatLimits<T>::PosInfinity()) {
      return Sum();
    } else if (!w.Member() || sum_ == FloatLimits<T>::PosInfinity()) {
      sum_ = f;
      ssum_ = sw;
      c_ = 0.0;
    } else if (f == sum_) {
      if (equal) {
        sum_ = internal::KahanLogSum(sum_, f, &c_);
      } else {
        // Exact cancellation: reset to Zero.
        sum_ = FloatLimits<T>::PosInfinity();
        ssum_ = true;
        c_ = 0.0;
      }
    } else if (f > sum_) {
      if (equal) {
        sum_ = internal::KahanLogSum(sum_, f, &c_);
      } else {
        sum_ = internal::KahanLogDiff(sum_, f, &c_);
      }
    } else {
      if (equal) {
        sum_ = internal::KahanLogSum(f, sum_, &c_);
      } else {
        sum_ = internal::KahanLogDiff(f, sum_, &c_);
        ssum_ = sw;
      }
    }
    return Sum();
  }

  Weight Sum() { return Weight(X1(ssum_ ? 1.0 : -1.0), X2(sum_)); }

  void Reset(Weight w = Weight::Zero()) {
    ssum_ = w.Value1().Value() > 0.0;
    sum_ = w.Value2().Value();
    c_ = 0.0;
  }

 private:
  bool ssum_;   // true iff sign of sum is positive
  double sum_;  // unsigned sum
  double c_;    // Kahan compensation
};
// Conversions between the signed-log weights and the unsigned weight types.
// Converting *from* signed-log to an unsigned type is only valid for
// non-negative weights (checked via SignedLogConvertCheck); converting *to*
// signed-log always succeeds and attaches a positive sign.

// Converts to tropical.
template <>
struct WeightConvert<SignedLogWeight, TropicalWeight> {
  TropicalWeight operator()(const SignedLogWeight &weight) const {
    if (!SignedLogConvertCheck<SignedLogWeight, TropicalWeight>(weight)) {
      return TropicalWeight::NoWeight();
    }
    return TropicalWeight(weight.Value2().Value());
  }
};

template <>
struct WeightConvert<SignedLog64Weight, TropicalWeight> {
  TropicalWeight operator()(const SignedLog64Weight &weight) const {
    if (!SignedLogConvertCheck<SignedLog64Weight, TropicalWeight>(weight)) {
      return TropicalWeight::NoWeight();
    }
    return TropicalWeight(weight.Value2().Value());
  }
};

// Converts to log.
template <>
struct WeightConvert<SignedLogWeight, LogWeight> {
  LogWeight operator()(const SignedLogWeight &weight) const {
    if (!SignedLogConvertCheck<SignedLogWeight, LogWeight>(weight)) {
      return LogWeight::NoWeight();
    }
    return LogWeight(weight.Value2().Value());
  }
};

template <>
struct WeightConvert<SignedLog64Weight, LogWeight> {
  LogWeight operator()(const SignedLog64Weight &weight) const {
    if (!SignedLogConvertCheck<SignedLog64Weight, LogWeight>(weight)) {
      return LogWeight::NoWeight();
    }
    return LogWeight(weight.Value2().Value());
  }
};

// Converts to log64.
template <>
struct WeightConvert<SignedLogWeight, Log64Weight> {
  Log64Weight operator()(const SignedLogWeight &weight) const {
    if (!SignedLogConvertCheck<SignedLogWeight, Log64Weight>(weight)) {
      return Log64Weight::NoWeight();
    }
    return Log64Weight(weight.Value2().Value());
  }
};

template <>
struct WeightConvert<SignedLog64Weight, Log64Weight> {
  Log64Weight operator()(const SignedLog64Weight &weight) const {
    if (!SignedLogConvertCheck<SignedLog64Weight, Log64Weight>(weight)) {
      return Log64Weight::NoWeight();
    }
    return Log64Weight(weight.Value2().Value());
  }
};

// Converts to signed log.
template <>
struct WeightConvert<TropicalWeight, SignedLogWeight> {
  SignedLogWeight operator()(const TropicalWeight &weight) const {
    return SignedLogWeight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<LogWeight, SignedLogWeight> {
  SignedLogWeight operator()(const LogWeight &weight) const {
    return SignedLogWeight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<Log64Weight, SignedLogWeight> {
  SignedLogWeight operator()(const Log64Weight &weight) const {
    return SignedLogWeight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<SignedLog64Weight, SignedLogWeight> {
  SignedLogWeight operator()(const SignedLog64Weight &weight) const {
    return SignedLogWeight(weight.Value1(), weight.Value2().Value());
  }
};

// Converts to signed log64.
template <>
struct WeightConvert<TropicalWeight, SignedLog64Weight> {
  SignedLog64Weight operator()(const TropicalWeight &weight) const {
    return SignedLog64Weight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<LogWeight, SignedLog64Weight> {
  SignedLog64Weight operator()(const LogWeight &weight) const {
    return SignedLog64Weight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<Log64Weight, SignedLog64Weight> {
  SignedLog64Weight operator()(const Log64Weight &weight) const {
    return SignedLog64Weight(1.0, weight.Value());
  }
};

template <>
struct WeightConvert<SignedLogWeight, SignedLog64Weight> {
  SignedLog64Weight operator()(const SignedLogWeight &weight) const {
    return SignedLog64Weight(weight.Value1(), weight.Value2().Value());
  }
};
// This function object returns SignedLogWeightTpl<T>'s that are random integers
// chosen from [0, num_random_weights) times a random sign. This is intended
// primarily for testing.
// Random-weight generator for tests: produces random integer magnitudes in
// [0, num_random_weights) with a random sign, optionally including Zero.
template <class T>
class WeightGenerate<SignedLogWeightTpl<T>> {
 public:
  using Weight = SignedLogWeightTpl<T>;
  using X1 = typename Weight::X1;
  using X2 = typename Weight::X2;

  explicit WeightGenerate(bool allow_zero = true,
                          size_t num_random_weights = kNumRandomWeights)
      : allow_zero_(allow_zero), num_random_weights_(num_random_weights) {}

  Weight operator()() const {
    static const X1 negative_one(-1.0);
    static const X1 positive_one(+1.0);
    // m picks the sign; n picks the magnitude (n == num_random_weights_
    // selects Zero when allowed).
    const int m = rand() % 2;                                    // NOLINT
    const int n = rand() % (num_random_weights_ + allow_zero_);  // NOLINT
    return Weight((m == 0) ? negative_one : positive_one,
                  (allow_zero_ && n == num_random_weights_) ?
                   X2::Zero() : X2(n));
  }

 private:
  // Permits Zero() and zero divisors.
  const bool allow_zero_;
  // Number of alternative random weights.
  const size_t num_random_weights_;
};
} // namespace fst
#endif // FST_SIGNED_LOG_WEIGHT_H_
| 0 |
coqui_public_repos/open-bible-scripts | coqui_public_repos/open-bible-scripts/data/asante-twi.txt | https://downloads.open.bible/text/at/twiONA20/twiONA20_SFM.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_timingfiles.zip
https://ebible.org/Scriptures/aka_readaloud.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_GEN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_EXO_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_LEV_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_NUM_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_DEU_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JOS_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JDG_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_RUT_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1SA_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2SA_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1KI_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2KI_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1CH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2CH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_EZR_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_NEH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_EST_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JOB_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_PSA_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_PRO_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ECC_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_SNG_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ISA_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JER_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_LAM_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_EZK_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_DAN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_HOS_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JOL_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_AMO_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_OBA_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JON_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_MIC_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_NAM_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_HAB_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ZEP_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_HAG_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ZEC_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_MAL_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_MAT_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_MRK_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_LUK_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JHN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ACT_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_ROM_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1CO_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2CO_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_GAL_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_EPH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_PHP_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_COL_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1TH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2TH_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1TI_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2TI_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_TIT_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_PHM_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_HEB_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JAS_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1PE_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2PE_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_1JN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_2JN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_3JN_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_JUD_wav.zip
https://downloads.open.bible/audio/at/twiONA20/twiONA20_REV_wav.zip
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/tc-cpp_tflite-ds-tests.sh | #!/bin/bash
set -xe
source $(dirname "$0")/tc-tests-utils.sh
bitrate=$1
set_ldc_sample_filename "${bitrate}"
model_source=${DEEPSPEECH_TEST_MODEL//.pb/.tflite}
model_name=$(basename "${model_source}")
model_name_mmap=$(basename "${model_source}")
export DATA_TMP_DIR=${TASKCLUSTER_TMP_DIR}
download_material "${TASKCLUSTER_TMP_DIR}/ds"
export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH
check_versions
run_all_inference_tests
run_multi_inference_tests
run_cpp_only_inference_tests
run_hotword_tests
| 0 |
coqui_public_repos/Trainer | coqui_public_repos/Trainer/trainer/generic_utils.py | # -*- coding: utf-8 -*-
import datetime
import os
import subprocess
import fsspec
import torch
from trainer.logger import logger
def isimplemented(obj, method_name):
    """Return True when ``obj`` exposes a callable ``method_name`` whose body
    is not a stub raising ``NotImplementedError``."""
    if method_name not in dir(obj):
        return False
    if not callable(getattr(obj, method_name)):
        return False
    try:
        getattr(obj, method_name)()
    except NotImplementedError:
        return False
    except:  # pylint: disable=bare-except
        # Any other failure still means the method has a real body.
        return True
    return True
def to_cuda(x: torch.Tensor) -> torch.Tensor:
    """Return ``x`` made contiguous and moved to the GPU when one is
    available; ``None`` and non-tensor inputs pass through unchanged."""
    if x is None:
        return None
    if not torch.is_tensor(x):
        return x
    x = x.contiguous()
    return x.cuda(non_blocking=True) if torch.cuda.is_available() else x
def get_cuda():
    """Return ``(use_cuda, device)`` where ``device`` is ``cuda:0`` when a
    GPU is available and ``cpu`` otherwise."""
    use_cuda = torch.cuda.is_available()
    return use_cuda, torch.device("cuda:0" if use_cuda else "cpu")
def get_git_branch():
    """Return the current git branch name, or a placeholder when git or the
    repository is unavailable ("inside_docker" / "unknown")."""
    try:
        out = subprocess.check_output(["git", "branch"]).decode("utf8")
        current = next(line for line in out.split("\n") if line.startswith("*"))
        # Fix: the stripped name was previously discarded — str.replace
        # returns a new string, so ``current`` kept its "* " prefix.
        current = current.replace("* ", "")
    except subprocess.CalledProcessError:
        current = "inside_docker"
    except (FileNotFoundError, StopIteration):
        # StopIteration covers a repo with no branches (empty `git branch`).
        current = "unknown"
    return current
def get_commit_hash():
    """https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script"""
    try:
        return subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode().strip()
    # Not copying .git folder into docker container
    except (subprocess.CalledProcessError, FileNotFoundError):
        return "0000000"
def get_experiment_folder_path(root_path, model_name):
    """Get an experiment folder path with the current date and time"""
    timestamp = datetime.datetime.now().strftime("%B-%d-%Y_%I+%M%p")
    folder_name = "{}-{}-{}".format(model_name, timestamp, get_commit_hash())
    return os.path.join(root_path, folder_name)
def remove_experiment_folder(experiment_path):
    """Check folder if there is a checkpoint, otherwise remove the folder"""
    # fsspec makes this work for local paths and remote filesystems alike
    # (whatever scheme ``experiment_path`` carries).
    fs = fsspec.get_mapper(experiment_path).fs
    # A run is worth keeping only if it saved at least one .pth checkpoint.
    checkpoint_files = fs.glob(experiment_path + "/*.pth")
    if not checkpoint_files:
        if fs.exists(experiment_path):
            fs.rm(experiment_path, recursive=True)
            logger.info(" ! Run is removed from %s", experiment_path)
    else:
        logger.info(" ! Run is kept in %s", experiment_path)
def count_parameters(model):
    r"""Count number of trainable parameters in a network"""
    total = 0
    for param in model.parameters():
        if param.requires_grad:
            total += param.numel()
    return total
def set_partial_state_dict(model_dict, checkpoint_state, c):
    """Merge ``checkpoint_state`` into ``model_dict``, skipping incompatible layers.

    Layers missing on either side, differing in element count, or matching a
    substring in ``c.reinit_layers`` keep their current model values.

    Args:
        model_dict: State dict of the model being restored (updated in place).
        checkpoint_state: State dict loaded from a checkpoint.
        c: Config object; ``c.reinit_layers`` optionally lists layer-name
            substrings to re-initialize (i.e. not restore).

    Returns:
        The updated ``model_dict``.
    """
    # Partial initialization: if there is a mismatch with new and old layer, it is skipped.
    for k, v in checkpoint_state.items():
        if k not in model_dict:
            logger.info(" | > Layer missing in the model definition: %s", k)
    for k in model_dict:
        if k not in checkpoint_state:
            logger.info(" | > Layer missing in the checkpoint: %s", k)
    for k, v in checkpoint_state.items():
        if k in model_dict and v.numel() != model_dict[k].numel():
            # Fix: corrected typos in the log message ("dimention missmatch").
            logger.info(" | > Layer dimension mismatch between model definition and checkpoint: %s", k)
    # 1. filter out unnecessary keys
    pretrained_dict = {k: v for k, v in checkpoint_state.items() if k in model_dict}
    # 2. filter out different size layers
    pretrained_dict = {k: v for k, v in pretrained_dict.items() if v.numel() == model_dict[k].numel()}
    # 3. skip reinit layers
    if c.has("reinit_layers") and c.reinit_layers is not None:
        for reinit_layer_name in c.reinit_layers:
            pretrained_dict = {k: v for k, v in pretrained_dict.items() if reinit_layer_name not in k}
    # 4. overwrite entries in the existing state dict
    model_dict.update(pretrained_dict)
    logger.info(" | > %i / %i layers are restored.", len(pretrained_dict), len(model_dict))
    return model_dict
class KeepAverage:
    """Track running averages keyed by name, with an optional exponentially
    weighted mode (0.99 old / 0.01 new)."""

    def __init__(self):
        self.avg_values = {}
        self.iters = {}

    def __getitem__(self, key):
        return self.avg_values[key]

    def items(self):
        return self.avg_values.items()

    def add_value(self, name, init_val=0, init_iter=0):
        """Register ``name`` with an initial value and iteration count."""
        self.avg_values[name] = init_val
        self.iters[name] = init_iter

    def update_value(self, name, value, weighted_avg=False):
        """Fold ``value`` into the running average for ``name``."""
        if name not in self.avg_values:
            # First sighting: seed the entry with the value itself.
            self.add_value(name, init_val=value)
            return
        if weighted_avg:
            self.avg_values[name] = 0.99 * self.avg_values[name] + 0.01 * value
            self.iters[name] += 1
            return
        running_total = self.avg_values[name] * self.iters[name] + value
        self.iters[name] += 1
        self.avg_values[name] = running_total / self.iters[name]

    def add_values(self, name_dict):
        for key, value in name_dict.items():
            self.add_value(key, init_val=value)

    def update_values(self, value_dict):
        for key, value in value_dict.items():
            self.update_value(key, value)
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/far/farisomorphic.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Tests if two Far files contains isomorphic (key,fst) pairs.
#include <string>
#include <fst/flags.h>
#include <fst/extensions/far/farscript.h>
#include <fst/extensions/far/getters.h>
DEFINE_string(begin_key, "",
"First key to extract (def: first key in archive)");
DEFINE_string(end_key, "", "Last key to extract (def: last key in archive)");
DEFINE_double(delta, fst::kDelta, "Comparison/quantization delta");
int main(int argc, char **argv) {
namespace s = fst::script;
string usage = "Compares the FSTs in two FST archives for isomorphism.";
usage += "\n\n Usage:";
usage += argv[0];
usage += " in1.far in2.far\n";
usage += " Flags: begin_key end_key";
std::set_new_handler(FailedNewHandler);
SET_FLAGS(usage.c_str(), &argc, &argv, true);
s::ExpandArgs(argc, argv, &argc, &argv);
if (argc != 3) {
ShowUsage();
return 1;
}
const auto arc_type = s::LoadArcTypeFromFar(argv[1]);
if (arc_type.empty()) return 1;
bool result = s::FarIsomorphic(argv[1], argv[2], arc_type,
FLAGS_delta, FLAGS_begin_key, FLAGS_end_key);
if (!result) VLOG(1) << "FARs are not isomorphic.";
return result ? 0 : 2;
}
| 0 |
coqui_public_repos/STT/native_client/kenlm/lm | coqui_public_repos/STT/native_client/kenlm/lm/interpolate/tune_matrix.hh | #ifndef LM_INTERPOLATE_TUNE_MATRIX_H
#define LM_INTERPOLATE_TUNE_MATRIX_H
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpragmas" // Older gcc doesn't have "-Wunused-local-typedefs" and complains.
#pragma GCC diagnostic ignored "-Wunused-local-typedefs"
#include <Eigen/Core>
#pragma GCC diagnostic pop
namespace lm { namespace interpolate {
typedef Eigen::MatrixXf Matrix;
typedef Eigen::VectorXf Vector;
typedef Matrix::Scalar Accum;
}} // namespaces
#endif // LM_INTERPOLATE_TUNE_MATRIX_H
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/pdt/pdtreplace.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Converts an RTN represented by FSTs and non-terminal labels into a PDT.
#include <cstring>
#include <string>
#include <vector>
#include <fst/flags.h>
#include <fst/extensions/pdt/getters.h>
#include <fst/extensions/pdt/pdtscript.h>
#include <fst/util.h>
#include <fst/vector-fst.h>
DEFINE_string(pdt_parentheses, "", "PDT parenthesis label pairs");
DEFINE_string(pdt_parser_type, "left",
"Construction method, one of: \"left\", \"left_sr\"");
DEFINE_int64(start_paren_labels, fst::kNoLabel,
"Index to use for the first inserted parentheses; if not "
"specified, the next available label beyond the highest output "
"label is used");
DEFINE_string(left_paren_prefix, "(_", "Prefix to attach to SymbolTable "
"labels for inserted left parentheses");
DEFINE_string(right_paren_prefix, ")_", "Prefix to attach to SymbolTable "
"labels for inserted right parentheses");
void Cleanup(std::vector<fst::script::LabelFstClassPair> *pairs) {
for (const auto &pair : *pairs) {
delete pair.second;
}
pairs->clear();
}
int main(int argc, char **argv) {
namespace s = fst::script;
using fst::script::FstClass;
using fst::script::VectorFstClass;
using fst::PdtParserType;
using fst::WriteLabelPairs;
string usage = "Converts an RTN represented by FSTs";
usage += " and non-terminal labels into PDT.\n\n Usage: ";
usage += argv[0];
usage += " root.fst rootlabel [rule1.fst label1 ...] [out.fst]\n";
std::set_new_handler(FailedNewHandler);
SET_FLAGS(usage.c_str(), &argc, &argv, true);
if (argc < 4) {
ShowUsage();
return 1;
}
const string in_name = argv[1];
const string out_name = argc % 2 == 0 ? argv[argc - 1] : "";
auto *ifst = FstClass::Read(in_name);
if (!ifst) return 1;
PdtParserType parser_type;
if (!s::GetPdtParserType(FLAGS_pdt_parser_type, &parser_type)) {
LOG(ERROR) << argv[0] << ": Unknown PDT parser type: "
<< FLAGS_pdt_parser_type;
delete ifst;
return 1;
}
std::vector<s::LabelFstClassPair> pairs;
// Note that if the root label is beyond the range of the underlying FST's
// labels, truncation will occur.
const auto root = atoll(argv[2]);
pairs.emplace_back(root, ifst);
for (auto i = 3; i < argc - 1; i += 2) {
ifst = FstClass::Read(argv[i]);
if (!ifst) {
Cleanup(&pairs);
return 1;
}
// Note that if the root label is beyond the range of the underlying FST's
// labels, truncation will occur.
const auto label = atoll(argv[i + 1]);
pairs.emplace_back(label, ifst);
}
VectorFstClass ofst(ifst->ArcType());
std::vector<s::LabelPair> parens;
s::PdtReplace(pairs, &ofst, &parens, root, parser_type,
FLAGS_start_paren_labels, FLAGS_left_paren_prefix,
FLAGS_right_paren_prefix);
Cleanup(&pairs);
if (!FLAGS_pdt_parentheses.empty()) {
if (!WriteLabelPairs(FLAGS_pdt_parentheses, parens)) return 1;
}
ofst.Write(out_name);
return 0;
}
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/tc-cpp_tflite_basic-ds-tests.sh | #!/bin/bash
set -xe
source $(dirname "$0")/tc-tests-utils.sh
bitrate=$1
set_ldc_sample_filename "${bitrate}"
model_source=${DEEPSPEECH_TEST_MODEL//.pb/.tflite}
model_name=$(basename "${model_source}")
export DATA_TMP_DIR=${TASKCLUSTER_TMP_DIR}
download_material "${TASKCLUSTER_TMP_DIR}/ds"
export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH
check_versions
run_tflite_basic_inference_tests
| 0 |
coqui_public_repos/STT-models/hindi/bozden | coqui_public_repos/STT-models/hindi/bozden/v0.8.99/LICENSE | https://creativecommons.org/licenses/by-nc-sa/3.0/ | 0 |
coqui_public_repos/TTS/tests | coqui_public_repos/TTS/tests/inputs/xtts_vocab.json | {
"version": "1.0",
"truncation": null,
"padding": null,
"added_tokens": [
{
"id": 0,
"special": true,
"content": "[STOP]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 1,
"special": true,
"content": "[UNK]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 2,
"special": true,
"content": "[SPACE]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 259,
"special": true,
"content": "[en]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 260,
"special": true,
"content": "[de]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 261,
"special": true,
"content": "[START]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 262,
"special": true,
"content": "[fr]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 284,
"special": true,
"content": "[es]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 285,
"special": true,
"content": "[it]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 286,
"special": true,
"content": "[pt]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 294,
"special": true,
"content": "[pl]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 295,
"special": true,
"content": "[tr]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 267,
"special": true,
"content": "[ru]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 293,
"special": true,
"content": "[cs]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 297,
"special": true,
"content": "[nl]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 5022,
"special": true,
"content": "[ar]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 5023,
"special": true,
"content": "[zh-cn]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
},
{
"id": 5412,
"special": true,
"content": "[ja]",
"single_word": false,
"lstrip": false,
"rstrip": false,
"normalized": false
}
],
"normalizer": null,
"pre_tokenizer": {
"type": "Whitespace"
},
"post_processor": null,
"decoder": null,
"model": {
"type": "BPE",
"dropout": null,
"unk_token": "[UNK]",
"continuing_subword_prefix": null,
"end_of_word_suffix": null,
"fuse_unk": false,
"vocab": {
"[STOP]": 0,
"[UNK]": 1,
"[SPACE]": 2,
"!": 3,
"'": 4,
"(": 5,
")": 6,
",": 7,
"-": 8,
".": 9,
"/": 10,
":": 11,
";": 12,
"?": 13,
"a": 14,
"b": 15,
"c": 16,
"d": 17,
"e": 18,
"f": 19,
"g": 20,
"h": 21,
"i": 22,
"j": 23,
"k": 24,
"l": 25,
"m": 26,
"n": 27,
"o": 28,
"p": 29,
"q": 30,
"r": 31,
"s": 32,
"t": 33,
"u": 34,
"v": 35,
"w": 36,
"x": 37,
"y": 38,
"z": 39,
"th": 40,
"in": 41,
"the": 42,
"an": 43,
"er": 44,
"ou": 45,
"re": 46,
"on": 47,
"at": 48,
"ed": 49,
"en": 50,
"to": 51,
"ing": 52,
"and": 53,
"is": 54,
"as": 55,
"al": 56,
"or": 57,
"of": 58,
"ar": 59,
"it": 60,
"es": 61,
"he": 62,
"st": 63,
"le": 64,
"om": 65,
"se": 66,
"be": 67,
"ad": 68,
"ow": 69,
"ly": 70,
"ch": 71,
"wh": 72,
"that": 73,
"you": 74,
"li": 75,
"ve": 76,
"ac": 77,
"ti": 78,
"ld": 79,
"me": 80,
"was": 81,
"gh": 82,
"id": 83,
"ll": 84,
"wi": 85,
"ent": 86,
"for": 87,
"ay": 88,
"ro": 89,
"ver": 90,
"ic": 91,
"her": 92,
"ke": 93,
"his": 94,
"no": 95,
"ut": 96,
"un": 97,
"ir": 98,
"lo": 99,
"we": 100,
"ri": 101,
"ha": 102,
"with": 103,
"ght": 104,
"out": 105,
"im": 106,
"ion": 107,
"all": 108,
"ab": 109,
"one": 110,
"ne": 111,
"ge": 112,
"ould": 113,
"ter": 114,
"mo": 115,
"had": 116,
"ce": 117,
"she": 118,
"go": 119,
"sh": 120,
"ur": 121,
"am": 122,
"so": 123,
"pe": 124,
"my": 125,
"de": 126,
"are": 127,
"but": 128,
"ome": 129,
"fr": 130,
"ther": 131,
"fe": 132,
"su": 133,
"do": 134,
"con": 135,
"te": 136,
"ain": 137,
"ere": 138,
"po": 139,
"if": 140,
"they": 141,
"us": 142,
"ag": 143,
"tr": 144,
"now": 145,
"oun": 146,
"this": 147,
"have": 148,
"not": 149,
"sa": 150,
"il": 151,
"up": 152,
"thing": 153,
"from": 154,
"ap": 155,
"him": 156,
"ack": 157,
"ation": 158,
"ant": 159,
"our": 160,
"op": 161,
"like": 162,
"ust": 163,
"ess": 164,
"bo": 165,
"ok": 166,
"ul": 167,
"ind": 168,
"ex": 169,
"com": 170,
"some": 171,
"there": 172,
"ers": 173,
"co": 174,
"res": 175,
"man": 176,
"ard": 177,
"pl": 178,
"wor": 179,
"way": 180,
"tion": 181,
"fo": 182,
"ca": 183,
"were": 184,
"by": 185,
"ate": 186,
"pro": 187,
"ted": 188,
"ound": 189,
"own": 190,
"would": 191,
"ts": 192,
"what": 193,
"qu": 194,
"ally": 195,
"ight": 196,
"ck": 197,
"gr": 198,
"when": 199,
"ven": 200,
"can": 201,
"ough": 202,
"ine": 203,
"end": 204,
"per": 205,
"ous": 206,
"od": 207,
"ide": 208,
"know": 209,
"ty": 210,
"very": 211,
"si": 212,
"ak": 213,
"who": 214,
"about": 215,
"ill": 216,
"them": 217,
"est": 218,
"red": 219,
"ye": 220,
"could": 221,
"ong": 222,
"your": 223,
"their": 224,
"em": 225,
"just": 226,
"other": 227,
"into": 228,
"any": 229,
"whi": 230,
"um": 231,
"tw": 232,
"ast": 233,
"der": 234,
"did": 235,
"ie": 236,
"been": 237,
"ace": 238,
"ink": 239,
"ity": 240,
"back": 241,
"ting": 242,
"br": 243,
"more": 244,
"ake": 245,
"pp": 246,
"then": 247,
"sp": 248,
"el": 249,
"use": 250,
"bl": 251,
"said": 252,
"over": 253,
"get": 254,
"ß": 255,
"ä": 256,
"ö": 257,
"ü": 258,
"[en]": 259,
"[de]": 260,
"[START]": 261,
"[fr]": 262,
"œ": 263,
"ï": 264,
"ê": 265,
"â": 266,
"[ru]": 267,
"ÿ": 268,
"è": 269,
"à": 270,
"ë": 271,
"ù": 272,
"î": 273,
"ç": 274,
"æ": 275,
"ô": 276,
"û": 277,
"á": 278,
"é": 279,
"í": 280,
"ó": 281,
"ú": 282,
"ñ": 283,
"[es]": 284,
"[it]": 285,
"[pt]": 286,
"ń": 287,
"ś": 288,
"ę": 289,
"ą": 290,
"ż": 291,
"ć": 292,
"[cs]": 293,
"[pl]": 294,
"[tr]": 295,
"ã": 296,
"[nl]": 297,
"ş": 298,
"ğ": 299,
"ı": 300,
"ò": 301,
"ì": 302,
"¿": 303,
"…": 304,
"i̇": 305,
"õ": 306,
"\"": 307,
"´": 308,
"ø": 309,
"č": 310,
"ō": 311,
"š": 312,
"ž": 313,
"̇": 314,
"ei": 315,
"ich": 316,
"ein": 317,
"au": 318,
"sch": 319,
"und": 320,
"die": 321,
"da": 322,
"den": 323,
"gen": 324,
"zu": 325,
"hr": 326,
"ten": 327,
"mi": 328,
"sie": 329,
"das": 330,
"eine": 331,
"icht": 332,
"ber": 333,
"ach": 334,
"auf": 335,
"lich": 336,
"nicht": 337,
"mm": 338,
"ben": 339,
"war": 340,
"mit": 341,
"sich": 342,
"ig": 343,
"aus": 344,
"ist": 345,
"wie": 346,
"och": 347,
"ung": 348,
"ann": 349,
"ür": 350,
"hn": 351,
"ihr": 352,
"sen": 353,
"tz": 354,
"dem": 355,
"eit": 356,
"hat": 357,
"wir": 358,
"von": 359,
"wei": 360,
"ier": 361,
"ra": 362,
"einen": 363,
"vor": 364,
"als": 365,
"wo": 366,
"rei": 367,
"ste": 368,
"lie": 369,
"auch": 370,
"du": 371,
"des": 372,
"ko": 373,
"über": 374,
"bei": 375,
"hen": 376,
"hm": 377,
"lei": 378,
"aber": 379,
"wen": 380,
"hl": 381,
"ger": 382,
"nach": 383,
"ft": 384,
"imm": 385,
"je": 386,
"schen": 387,
"wer": 388,
"ser": 389,
"än": 390,
"sein": 391,
"ol": 392,
"cht": 393,
"für": 394,
"kl": 395,
"ff": 396,
"einem": 397,
"nen": 398,
"ja": 399,
"noch": 400,
"hatte": 401,
"pf": 402,
"hin": 403,
"di": 404,
"chen": 405,
"rü": 406,
"iel": 407,
"sel": 408,
"dass": 409,
"ihn": 410,
"mir": 411,
"schl": 412,
"ön": 413,
"gan": 414,
"gt": 415,
"einer": 416,
"sten": 417,
"mich": 418,
"wenn": 419,
"ell": 420,
"gte": 421,
"mal": 422,
"gel": 423,
"ken": 424,
"nur": 425,
"mmen": 426,
"fü": 427,
"ern": 428,
"ör": 429,
"unter": 430,
"ander": 431,
"dur": 432,
"uch": 433,
"ta": 434,
"men": 435,
"mach": 436,
"doch": 437,
"durch": 438,
"os": 439,
"gl": 440,
"hal": 441,
"ihre": 442,
"wä": 443,
"immer": 444,
"ihm": 445,
"kann": 446,
"ort": 447,
"dann": 448,
"lan": 449,
"tzt": 450,
"oder": 451,
"hren": 452,
"et": 453,
"kön": 454,
"ick": 455,
"fa": 456,
"wieder": 457,
"daß": 458,
"mein": 459,
"fen": 460,
"ganz": 461,
"diese": 462,
"ster": 463,
"dar": 464,
"wa": 465,
"ges": 466,
"na": 467,
"fl": 468,
"igen": 469,
"sche": 470,
"ungen": 471,
"mehr": 472,
"ßen": 473,
"ot": 474,
"kon": 475,
"gew": 476,
"haben": 477,
"geh": 478,
"ät": 479,
"sind": 480,
"dr": 481,
"wel": 482,
"uns": 483,
"vo": 484,
"ma": 485,
"ute": 486,
"schon": 487,
"bes": 488,
"gesch": 489,
"bt": 490,
"che": 491,
"son": 492,
"ob": 493,
"la": 494,
"rück": 495,
"seine": 496,
"kr": 497,
"fre": 498,
"eil": 499,
"zum": 500,
"hier": 501,
"kt": 502,
"ige": 503,
"spr": 504,
"leben": 505,
"bst": 506,
"zeit": 507,
"gro": 508,
"denn": 509,
"ho": 510,
"scha": 511,
"bar": 512,
"alle": 513,
"gegen": 514,
"wür": 515,
"mü": 516,
"ze": 517,
"werden": 518,
"jetzt": 519,
"kommen": 520,
"nie": 521,
"sei": 522,
"heit": 523,
"soll": 524,
"glei": 525,
"meine": 526,
"woll": 527,
"ner": 528,
"habe": 529,
"wur": 530,
"lichen": 531,
"assen": 532,
"nte": 533,
"sehen": 534,
"wird": 535,
"bis": 536,
"gar": 537,
"ien": 538,
"mus": 539,
"uß": 540,
"är": 541,
"stell": 542,
"keit": 543,
"zwei": 544,
"selbst": 545,
"sta": 546,
"pa": 547,
"sagte": 548,
"tet": 549,
"kam": 550,
"ssen": 551,
"viel": 552,
"ug": 553,
"zen": 554,
"hei": 555,
"mann": 556,
"will": 557,
"geb": 558,
"waren": 559,
"ück": 560,
"äch": 561,
"mer": 562,
"ru": 563,
"hau": 564,
"eigen": 565,
"ang": 566,
"weg": 567,
"blick": 568,
"fra": 569,
"alles": 570,
"ka": 571,
"augen": 572,
"fin": 573,
"liche": 574,
"unser": 575,
"dern": 576,
"herr": 577,
"nun": 578,
"vie": 579,
"chte": 580,
"wohl": 581,
"fall": 582,
"ht": 583,
"ün": 584,
"etwas": 585,
"stand": 586,
"äu": 587,
"mö": 588,
"tel": 589,
"rie": 590,
"dich": 591,
"dies": 592,
"hand": 593,
"bin": 594,
"ffen": 595,
"nichts": 596,
"dan": 597,
"hne": 598,
"ihnen": 599,
"esen": 600,
"dieser": 601,
"frau": 602,
"art": 603,
"dir": 604,
"isch": 605,
"erst": 606,
"gleich": 607,
"komm": 608,
"hör": 609,
"ße": 610,
"dig": 611,
"sehr": 612,
"zei": 613,
"sam": 614,
"aum": 615,
"hät": 616,
"ingen": 617,
"gut": 618,
"mut": 619,
"cken": 620,
"konnte": 621,
"stimm": 622,
"zur": 623,
"itz": 624,
"weil": 625,
"würde": 626,
"fä": 627,
"können": 628,
"keine": 629,
"fer": 630,
"ischen": 631,
"voll": 632,
"eines": 633,
"setz": 634,
"zie": 635,
"del": 636,
"tete": 637,
"seiner": 638,
"ieren": 639,
"gest": 640,
"zurück": 641,
"wurde": 642,
"schn": 643,
"pr": 644,
"ließ": 645,
"tra": 646,
"mä": 647,
"gend": 648,
"fol": 649,
"ik": 650,
"schla": 651,
"schaft": 652,
"ater": 653,
"weiß": 654,
"seinen": 655,
"lassen": 656,
"lu": 657,
"unden": 658,
"teil": 659,
"neu": 660,
"iert": 661,
"menschen": 662,
"hmen": 663,
"str": 664,
"gi": 665,
"sah": 666,
"ihren": 667,
"eln": 668,
"weiter": 669,
"gehen": 670,
"iger": 671,
"macht": 672,
"tag": 673,
"also": 674,
"halten": 675,
"nis": 676,
"acht": 677,
"geben": 678,
"og": 679,
"nat": 680,
"mar": 681,
"det": 682,
"ohne": 683,
"haus": 684,
"tro": 685,
"ange": 686,
"lau": 687,
"spiel": 688,
"tre": 689,
"schr": 690,
"inn": 691,
"los": 692,
"machen": 693,
"hätte": 694,
"beg": 695,
"wirk": 696,
"alt": 697,
"glich": 698,
"tes": 699,
"richt": 700,
"freund": 701,
"ihrer": 702,
"fel": 703,
"bel": 704,
"sol": 705,
"einmal": 706,
"eben": 707,
"hol": 708,
"hän": 709,
"tern": 710,
"hö": 711,
"schw": 712,
"recht": 713,
"wahr": 714,
"seinem": 715,
"stehen": 716,
"hlen": 717,
"ins": 718,
"ging": 719,
"wollte": 720,
"wissen": 721,
"ungs": 722,
"ald": 723,
"ass": 724,
"jahr": 725,
"mor": 726,
"welt": 727,
"under": 728,
"zusa": 729,
"kopf": 730,
"lang": 731,
"hinter": 732,
"atz": 733,
"stra": 734,
"angen": 735,
"ank": 736,
"ade": 737,
"glau": 738,
"fach": 739,
"hatten": 740,
"fort": 741,
"eicht": 742,
"iff": 743,
"ler": 744,
"mei": 745,
"diesem": 746,
"kein": 747,
"frei": 748,
"führ": 749,
"vom": 750,
"β": 751,
"ai": 752,
"ait": 753,
"que": 754,
"les": 755,
"av": 756,
"ais": 757,
"oi": 758,
"eu": 759,
"lle": 760,
"par": 761,
"ans": 762,
"ment": 763,
"ét": 764,
"une": 765,
"pas": 766,
"qui": 767,
"elle": 768,
"dé": 769,
"pour": 770,
"dans": 771,
"ré": 772,
"tou": 773,
"vous": 774,
"vi": 775,
"ouv": 776,
"mon": 777,
"sur": 778,
"ci": 779,
"plu": 780,
"ère": 781,
"mais": 782,
"ois": 783,
"plus": 784,
"ée": 785,
"aient": 786,
"mp": 787,
"lui": 788,
"ave": 789,
"était": 790,
"ses": 791,
"tout": 792,
"oir": 793,
"avait": 794,
"és": 795,
"mes": 796,
"nous": 797,
"eux": 798,
"bi": 799,
"ons": 800,
"pu": 801,
"ces": 802,
"tu": 803,
"leur": 804,
"don": 805,
"eur": 806,
"ette": 807,
"aire": 808,
"avec": 809,
"dit": 810,
"té": 811,
"ille": 812,
"comme": 813,
"cr": 814,
"ux": 815,
"ès": 816,
"aux": 817,
"jour": 818,
"ils": 819,
"bien": 820,
"cou": 821,
"quel": 822,
"peu": 823,
"cette": 824,
"cu": 825,
"mê": 826,
"fait": 827,
"gu": 828,
"être": 829,
"ité": 830,
"ens": 831,
"ni": 832,
"lé": 833,
"dis": 834,
"ble": 835,
"né": 836,
"puis": 837,
"même": 838,
"ques": 839,
"fi": 840,
"age": 841,
"moi": 842,
"ence": 843,
"ont": 844,
"main": 845,
"ors": 846,
"aut": 847,
"ance": 848,
"mé": 849,
"sans": 850,
"sé": 851,
"lon": 852,
"hom": 853,
"car": 854,
"able": 855,
"cher": 856,
"deux": 857,
"enf": 858,
"où": 859,
"ph": 860,
"ure": 861,
"temp": 862,
"pos": 863,
"rent": 864,
"pé": 865,
"faire": 866,
"pi": 867,
"tres": 868,
"ça": 869,
"endre": 870,
"bon": 871,
"sou": 872,
"int": 873,
"pré": 874,
"sent": 875,
"tant": 876,
"cer": 877,
"là": 878,
"lais": 879,
"près": 880,
"bre": 881,
"cour": 882,
"pet": 883,
"comp": 884,
"lait": 885,
"trouv": 886,
"entre": 887,
"sont": 888,
"dev": 889,
"nu": 890,
"temps": 891,
"dou": 892,
"rait": 893,
"bou": 894,
"quand": 895,
"jours": 896,
"avoir": 897,
"été": 898,
"ale": 899,
"pre": 900,
"fois": 901,
"orte": 902,
"vé": 903,
"non": 904,
"tous": 905,
"jus": 906,
"coup": 907,
"homme": 908,
"ête": 909,
"aussi": 910,
"urs": 911,
"seu": 912,
"ord": 913,
"min": 914,
"gé": 915,
"core": 916,
"va": 917,
"vre": 918,
"encore": 919,
"sem": 920,
"ite": 921,
"autre": 922,
"pris": 923,
"peut": 924,
"ue": 925,
"ante": 926,
"gn": 927,
"rép": 928,
"hu": 929,
"sion": 930,
"votre": 931,
"dire": 932,
"ez": 933,
"fem": 934,
"leurs": 935,
"met": 936,
"cri": 937,
"mis": 938,
"tour": 939,
"rai": 940,
"jam": 941,
"regar": 942,
"rien": 943,
"vers": 944,
"suis": 945,
"pouv": 946,
"vis": 947,
"grand": 948,
"ants": 949,
"cor": 950,
"rer": 951,
"cé": 952,
"tent": 953,
"pres": 954,
"vou": 955,
"alors": 956,
"sieur": 957,
"aine": 958,
"quoi": 959,
"fon": 960,
"endant": 961,
"arri": 962,
"eure": 963,
"après": 964,
"donc": 965,
"itu": 966,
"lè": 967,
"sait": 968,
"toi": 969,
"cha": 970,
"ail": 971,
"asse": 972,
"imp": 973,
"voy": 974,
"conn": 975,
"pla": 976,
"petit": 977,
"avant": 978,
"nom": 979,
"tin": 980,
"dont": 981,
"sous": 982,
"emp": 983,
"person": 984,
"elles": 985,
"beau": 986,
"parti": 987,
"cho": 988,
"prit": 989,
"toujours": 990,
"rais": 991,
"jamais": 992,
"trav": 993,
"tions": 994,
"très": 995,
"voi": 996,
"ren": 997,
"yeux": 998,
"voir": 999,
"premi": 1000,
"gne": 1001,
"heure": 1002,
"rou": 1003,
"eff": 1004,
"notre": 1005,
"ments": 1006,
"ton": 1007,
"fais": 1008,
"cela": 1009,
"répon": 1010,
"cons": 1011,
"air": 1012,
"ôt": 1013,
"pendant": 1014,
"ici": 1015,
"toute": 1016,
"jet": 1017,
"port": 1018,
"étaient": 1019,
"pen": 1020,
"hé": 1021,
"autres": 1022,
"père": 1023,
"oc": 1024,
"quelques": 1025,
"ique": 1026,
"lis": 1027,
"femme": 1028,
"jou": 1029,
"teur": 1030,
"monde": 1031,
"nes": 1032,
"dre": 1033,
"aff": 1034,
"rap": 1035,
"part": 1036,
"lement": 1037,
"cla": 1038,
"fut": 1039,
"quelque": 1040,
"prendre": 1041,
"rê": 1042,
"aille": 1043,
"sais": 1044,
"ches": 1045,
"let": 1046,
"char": 1047,
"ères": 1048,
"ents": 1049,
"moins": 1050,
"eau": 1051,
"aî": 1052,
"jeu": 1053,
"heur": 1054,
"ées": 1055,
"tri": 1056,
"point": 1057,
"mom": 1058,
"vent": 1059,
"nouv": 1060,
"gran": 1061,
"trois": 1062,
"sant": 1063,
"toutes": 1064,
"contre": 1065,
"èrent": 1066,
"chez": 1067,
"avez": 1068,
"ût": 1069,
"att": 1070,
"pau": 1071,
"porte": 1072,
"ouver": 1073,
"lit": 1074,
"prés": 1075,
"chose": 1076,
"vit": 1077,
"monsieur": 1078,
"hab": 1079,
"tête": 1080,
"ju": 1081,
"tement": 1082,
"ction": 1083,
"vrai": 1084,
"lar": 1085,
"cet": 1086,
"regard": 1087,
"lant": 1088,
"som": 1089,
"moment": 1090,
"illes": 1091,
"ple": 1092,
"ps": 1093,
"mère": 1094,
"cl": 1095,
"sour": 1096,
"ys": 1097,
"trop": 1098,
"enne": 1099,
"jusqu": 1100,
"avaient": 1101,
"avais": 1102,
"jeune": 1103,
"depuis": 1104,
"personne": 1105,
"fit": 1106,
"cert": 1107,
"jo": 1108,
"oui": 1109,
"rest": 1110,
"semb": 1111,
"cap": 1112,
"mat": 1113,
"mu": 1114,
"long": 1115,
"fran": 1116,
"faut": 1117,
"iti": 1118,
"bli": 1119,
"chev": 1120,
"pri": 1121,
"ente": 1122,
"ainsi": 1123,
"cham": 1124,
"lors": 1125,
"cas": 1126,
"ili": 1127,
"bé": 1128,
"nos": 1129,
"sui": 1130,
"rit": 1131,
"cro": 1132,
"gue": 1133,
"ía": 1134,
"por": 1135,
"las": 1136,
"ón": 1137,
"una": 1138,
"aba": 1139,
"dos": 1140,
"era": 1141,
"mb": 1142,
"para": 1143,
"ás": 1144,
"mos": 1145,
"ando": 1146,
"como": 1147,
"más": 1148,
"ción": 1149,
"tan": 1150,
"dad": 1151,
"ado": 1152,
"fu": 1153,
"cia": 1154,
"mente": 1155,
"sus": 1156,
"tar": 1157,
"za": 1158,
"ba": 1159,
"pero": 1160,
"sin": 1161,
"lla": 1162,
"án": 1163,
"ia": 1164,
"ran": 1165,
"ga": 1166,
"yo": 1167,
"tos": 1168,
"cos": 1169,
"ya": 1170,
"ones": 1171,
"había": 1172,
"hi": 1173,
"esta": 1174,
"mas": 1175,
"tor": 1176,
"aban": 1177,
"dor": 1178,
"ían": 1179,
"tas": 1180,
"én": 1181,
"endo": 1182,
"aque": 1183,
"ero": 1184,
"io": 1185,
"qué": 1186,
"cab": 1187,
"tal": 1188,
"señ": 1189,
"ora": 1190,
"todo": 1191,
"sal": 1192,
"cuando": 1193,
"gun": 1194,
"bu": 1195,
"ras": 1196,
"esto": 1197,
"pare": 1198,
"él": 1199,
"tras": 1200,
"jos": 1201,
"mien": 1202,
"pue": 1203,
"cre": 1204,
"pon": 1205,
"día": 1206,
"tros": 1207,
"sab": 1208,
"sobre": 1209,
"ese": 1210,
"mbre": 1211,
"eron": 1212,
"añ": 1213,
"ido": 1214,
"porque": 1215,
"ella": 1216,
"cen": 1217,
"muy": 1218,
"cal": 1219,
"este": 1220,
"has": 1221,
"có": 1222,
"gra": 1223,
"ros": 1224,
"aquel": 1225,
"dijo": 1226,
"cía": 1227,
"zo": 1228,
"ciones": 1229,
"mbi": 1230,
"elo": 1231,
"tó": 1232,
"ina": 1233,
"todos": 1234,
"tien": 1235,
"estaba": 1236,
"deci": 1237,
"cio": 1238,
"ño": 1239,
"lor": 1240,
"nues": 1241,
"medi": 1242,
"len": 1243,
"vida": 1244,
"ali": 1245,
"pues": 1246,
"ales": 1247,
"vol": 1248,
"mí": 1249,
"rar": 1250,
"cion": 1251,
"hasta": 1252,
"señor": 1253,
"cono": 1254,
"ah": 1255,
"dios": 1256,
"esa": 1257,
"ún": 1258,
"var": 1259,
"san": 1260,
"gui": 1261,
"otros": 1262,
"tado": 1263,
"buen": 1264,
"ña": 1265,
"tiemp": 1266,
"hacer": 1267,
"jer": 1268,
"vu": 1269,
"ana": 1270,
"así": 1271,
"antes": 1272,
"vez": 1273,
"miento": 1274,
"jar": 1275,
"lab": 1276,
"casa": 1277,
"eso": 1278,
"ego": 1279,
"dió": 1280,
"está": 1281,
"encia": 1282,
"eli": 1283,
"ías": 1284,
"tiempo": 1285,
"zar": 1286,
"van": 1287,
"mun": 1288,
"erta": 1289,
"tambi": 1290,
"sí": 1291,
"aun": 1292,
"mismo": 1293,
"entes": 1294,
"mano": 1295,
"ele": 1296,
"nada": 1297,
"segu": 1298,
"mej": 1299,
"erra": 1300,
"tir": 1301,
"uno": 1302,
"donde": 1303,
"toda": 1304,
"desde": 1305,
"también": 1306,
"cuer": 1307,
"hombre": 1308,
"otro": 1309,
"lib": 1310,
"trar": 1311,
"cual": 1312,
"hay": 1313,
"cada": 1314,
"taba": 1315,
"mento": 1316,
"tenía": 1317,
"quer": 1318,
"eran": 1319,
"siemp": 1320,
"siempre": 1321,
"erto": 1322,
"quí": 1323,
"gos": 1324,
"pués": 1325,
"ellos": 1326,
"después": 1327,
"nue": 1328,
"llo": 1329,
"inter": 1330,
"cómo": 1331,
"ahora": 1332,
"uste": 1333,
"traba": 1334,
"lado": 1335,
"ino": 1336,
"poco": 1337,
"erte": 1338,
"mujer": 1339,
"quier": 1340,
"algun": 1341,
"fue": 1342,
"ojos": 1343,
"enton": 1344,
"vos": 1345,
"esper": 1346,
"much": 1347,
"otra": 1348,
"az": 1349,
"eza": 1350,
"aquí": 1351,
"cias": 1352,
"gua": 1353,
"mucho": 1354,
"decir": 1355,
"esti": 1356,
"idad": 1357,
"algo": 1358,
"ocu": 1359,
"entonces": 1360,
"dido": 1361,
"entos": 1362,
"gri": 1363,
"dado": 1364,
"ios": 1365,
"dose": 1366,
"usted": 1367,
"quien": 1368,
"ami": 1369,
"unto": 1370,
"mejor": 1371,
"bas": 1372,
"solo": 1373,
"pregun": 1374,
"tur": 1375,
"alg": 1376,
"todas": 1377,
"parte": 1378,
"emb": 1379,
"cto": 1380,
"mundo": 1381,
"tiene": 1382,
"tante": 1383,
"palab": 1384,
"tran": 1385,
"aquella": 1386,
"cios": 1387,
"aunque": 1388,
"cuen": 1389,
"tener": 1390,
"fun": 1391,
"respon": 1392,
"allí": 1393,
"xi": 1394,
"han": 1395,
"pens": 1396,
"contra": 1397,
"tura": 1398,
"val": 1399,
"dio": 1400,
"tanto": 1401,
"camin": 1402,
"mó": 1403,
"esp": 1404,
"ada": 1405,
"ío": 1406,
"hacia": 1407,
"dej": 1408,
"estar": 1409,
"ión": 1410,
"gas": 1411,
"vas": 1412,
"noche": 1413,
"ér": 1414,
"años": 1415,
"padre": 1416,
"gus": 1417,
"ár": 1418,
"sino": 1419,
"manos": 1420,
"cido": 1421,
"estu": 1422,
"hubi": 1423,
"vir": 1424,
"bri": 1425,
"raz": 1426,
"chi": 1427,
"puede": 1428,
"menos": 1429,
"habi": 1430,
"homb": 1431,
"neces": 1432,
"may": 1433,
"eros": 1434,
"ría": 1435,
"hecho": 1436,
"escu": 1437,
"lti": 1438,
"ándo": 1439,
"bus": 1440,
"cosas": 1441,
"tú": 1442,
"espa": 1443,
"reci": 1444,
"ctor": 1445,
"prim": 1446,
"dia": 1447,
"dese": 1448,
"mientras": 1449,
"hor": 1450,
"fuer": 1451,
"ida": 1452,
"posi": 1453,
"lante": 1454,
"ano": 1455,
"estas": 1456,
"pli": 1457,
"luego": 1458,
"sión": 1459,
"cin": 1460,
"tierra": 1461,
"guar": 1462,
"cado": 1463,
"encon": 1464,
"pren": 1465,
"mayor": 1466,
"fal": 1467,
"ð": 1468,
"ħ": 1469,
"ň": 1470,
"ə": 1471,
"θ": 1472,
"’": 1473,
"“": 1474,
"”": 1475,
"zi": 1476,
"gli": 1477,
"tto": 1478,
"ono": 1479,
"nel": 1480,
"tti": 1481,
"della": 1482,
"zione": 1483,
"tta": 1484,
"tà": 1485,
"uo": 1486,
"come": 1487,
"alla": 1488,
"oni": 1489,
"ggi": 1490,
"ssi": 1491,
"più": 1492,
"ini": 1493,
"bb": 1494,
"sto": 1495,
"sono": 1496,
"eri": 1497,
"sse": 1498,
"sc": 1499,
"sul": 1500,
"vano": 1501,
"sti": 1502,
"suo": 1503,
"cchi": 1504,
"zza": 1505,
"anche": 1506,
"tte": 1507,
"sci": 1508,
"col": 1509,
"sso": 1510,
"ssa": 1511,
"dei": 1512,
"aveva": 1513,
"zz": 1514,
"amo": 1515,
"gno": 1516,
"sua": 1517,
"ria": 1518,
"sì": 1519,
"ché": 1520,
"dal": 1521,
"ona": 1522,
"spe": 1523,
"gni": 1524,
"tt": 1525,
"delle": 1526,
"questo": 1527,
"nella": 1528,
"dere": 1529,
"anno": 1530,
"dell": 1531,
"uni": 1532,
"bbe": 1533,
"anti": 1534,
"ene": 1535,
"gio": 1536,
"uto": 1537,
"qual": 1538,
"glia": 1539,
"quando": 1540,
"tutto": 1541,
"glio": 1542,
"zioni": 1543,
"cam": 1544,
"esso": 1545,
"ss": 1546,
"mol": 1547,
"loro": 1548,
"perché": 1549,
"cosa": 1550,
"due": 1551,
"poi": 1552,
"sco": 1553,
"cco": 1554,
"gna": 1555,
"tem": 1556,
"prima": 1557,
"così": 1558,
"essere": 1559,
"ani": 1560,
"bra": 1561,
"rio": 1562,
"anco": 1563,
"cui": 1564,
"spi": 1565,
"via": 1566,
"gior": 1567,
"bile": 1568,
"ggio": 1569,
"mai": 1570,
"tare": 1571,
"indi": 1572,
"rebbe": 1573,
"senza": 1574,
"zio": 1575,
"tutti": 1576,
"stato": 1577,
"zia": 1578,
"dalla": 1579,
"mia": 1580,
"vita": 1581,
"quella": 1582,
"qua": 1583,
"dove": 1584,
"allo": 1585,
"sempre": 1586,
"zzo": 1587,
"sia": 1588,
"dopo": 1589,
"porta": 1590,
"ccia": 1591,
"erano": 1592,
"anni": 1593,
"chia": 1594,
"enza": 1595,
"propri": 1596,
"anda": 1597,
"cca": 1598,
"occhi": 1599,
"questa": 1600,
"ffi": 1601,
"ron": 1602,
"mio": 1603,
"ris": 1604,
"ogni": 1605,
"rin": 1606,
"far": 1607,
"menti": 1608,
"ancora": 1609,
"fatto": 1610,
"mani": 1611,
"senti": 1612,
"pra": 1613,
"tempo": 1614,
"essi": 1615,
"bbi": 1616,
"lare": 1617,
"pers": 1618,
"sor": 1619,
"anza": 1620,
"pie": 1621,
"verso": 1622,
"altro": 1623,
"tato": 1624,
"cato": 1625,
"ato": 1626,
"volta": 1627,
"cc": 1628,
"fare": 1629,
"ciò": 1630,
"bili": 1631,
"nuo": 1632,
"quello": 1633,
"colo": 1634,
"ppo": 1635,
"trova": 1636,
"ore": 1637,
"rono": 1638,
"molto": 1639,
"almente": 1640,
"sca": 1641,
"vole": 1642,
"tali": 1643,
"sulla": 1644,
"sce": 1645,
"meno": 1646,
"anto": 1647,
"pun": 1648,
"stu": 1649,
"capi": 1650,
"giu": 1651,
"mini": 1652,
"pia": 1653,
"lavo": 1654,
"vero": 1655,
"rsi": 1656,
"altri": 1657,
"scia": 1658,
"suoi": 1659,
"glie": 1660,
"sotto": 1661,
"bene": 1662,
"scri": 1663,
"tale": 1664,
"degli": 1665,
"alc": 1666,
"uomo": 1667,
"pel": 1668,
"pote": 1669,
"essa": 1670,
"scu": 1671,
"signo": 1672,
"stro": 1673,
"uti": 1674,
"sione": 1675,
"gre": 1676,
"fini": 1677,
"lun": 1678,
"esi": 1679,
"passa": 1680,
"rà": 1681,
"mentre": 1682,
"hanno": 1683,
"usci": 1684,
"gia": 1685,
"già": 1686,
"mina": 1687,
"tica": 1688,
"giorno": 1689,
"esse": 1690,
"modo": 1691,
"spa": 1692,
"proprio": 1693,
"ori": 1694,
"contro": 1695,
"stru": 1696,
"diven": 1697,
"disse": 1698,
"rato": 1699,
"noi": 1700,
"vere": 1701,
"può": 1702,
"dice": 1703,
"cci": 1704,
"secon": 1705,
"ccio": 1706,
"qualche": 1707,
"tutta": 1708,
"gg": 1709,
"mondo": 1710,
"forma": 1711,
"mma": 1712,
"pensa": 1713,
"deva": 1714,
"fosse": 1715,
"sopra": 1716,
"tamente": 1717,
"ness": 1718,
"quanto": 1719,
"raga": 1720,
"unque": 1721,
"care": 1722,
"stre": 1723,
"grande": 1724,
"picco": 1725,
"guarda": 1726,
"nell": 1727,
"possi": 1728,
"presen": 1729,
"rò": 1730,
"paro": 1731,
"tua": 1732,
"vin": 1733,
"ane": 1734,
"stesso": 1735,
"dav": 1736,
"nei": 1737,
"nelle": 1738,
"ghi": 1739,
"pio": 1740,
"lato": 1741,
"sid": 1742,
"fine": 1743,
"fuo": 1744,
"quasi": 1745,
"ulti": 1746,
"ito": 1747,
"sue": 1748,
"fil": 1749,
"allora": 1750,
"veni": 1751,
"tano": 1752,
"ello": 1753,
"ão": 1754,
"não": 1755,
"uma": 1756,
"ela": 1757,
"lh": 1758,
"ção": 1759,
"cê": 1760,
"inha": 1761,
"você": 1762,
"ec": 1763,
"dade": 1764,
"ao": 1765,
"ram": 1766,
"vel": 1767,
"ém": 1768,
"pode": 1769,
"estava": 1770,
"isso": 1771,
"mui": 1772,
"faz": 1773,
"ões": 1774,
"pes": 1775,
"ix": 1776,
"sim": 1777,
"olh": 1778,
"isa": 1779,
"ên": 1780,
"tinha": 1781,
"meu": 1782,
"são": 1783,
"minha": 1784,
"muito": 1785,
"foi": 1786,
"bem": 1787,
"diz": 1788,
"parec": 1789,
"ço": 1790,
"pesso": 1791,
"pois": 1792,
"mesmo": 1793,
"ções": 1794,
"seus": 1795,
"até": 1796,
"ência": 1797,
"lhe": 1798,
"tiv": 1799,
"mã": 1800,
"só": 1801,
"tão": 1802,
"tudo": 1803,
"então": 1804,
"inda": 1805,
"bal": 1806,
"indo": 1807,
"ndo": 1808,
"já": 1809,
"vam": 1810,
"eito": 1811,
"depois": 1812,
"mel": 1813,
"lha": 1814,
"ainda": 1815,
"fazer": 1816,
"pou": 1817,
"pergun": 1818,
"deix": 1819,
"tamb": 1820,
"ala": 1821,
"pelo": 1822,
"também": 1823,
"fica": 1824,
"prec": 1825,
"eles": 1826,
"havia": 1827,
"lá": 1828,
"nas": 1829,
"gem": 1830,
"mem": 1831,
"ós": 1832,
"deu": 1833,
"eiro": 1834,
"..": 1835,
"assim": 1836,
"ior": 1837,
"har": 1838,
"aqui": 1839,
"cul": 1840,
"sar": 1841,
"outra": 1842,
"olhos": 1843,
"ima": 1844,
"mim": 1845,
"ago": 1846,
"pessoas": 1847,
"eram": 1848,
"eira": 1849,
"pela": 1850,
"coisa": 1851,
"mão": 1852,
"conh": 1853,
"agora": 1854,
"iam": 1855,
"há": 1856,
"suas": 1857,
"guém": 1858,
"cabe": 1859,
"nem": 1860,
"ível": 1861,
"consegu": 1862,
"trabal": 1863,
"lev": 1864,
"lem": 1865,
"vai": 1866,
"tei": 1867,
"pró": 1868,
"quem": 1869,
"onde": 1870,
"cabeça": 1871,
"nunca": 1872,
"mentos": 1873,
"hum": 1874,
"dele": 1875,
"verdade": 1876,
"tá": 1877,
"hos": 1878,
"algum": 1879,
"dizer": 1880,
"penas": 1881,
"nós": 1882,
"enquanto": 1883,
"outro": 1884,
"lho": 1885,
"melhor": 1886,
"primei": 1887,
"iu": 1888,
"apenas": 1889,
"estou": 1890,
"conte": 1891,
"homem": 1892,
"dois": 1893,
"ças": 1894,
"pouco": 1895,
"senhor": 1896,
"tando": 1897,
"espera": 1898,
"pai": 1899,
"rios": 1900,
"baix": 1901,
"ase": 1902,
"isas": 1903,
"hora": 1904,
"ficar": 1905,
"seja": 1906,
"ân": 1907,
"clar": 1908,
"inc": 1909,
"fos": 1910,
"ouvi": 1911,
"vem": 1912,
"tava": 1913,
"ário": 1914,
"sos": 1915,
"inho": 1916,
"rando": 1917,
"ês": 1918,
"coisas": 1919,
"aconte": 1920,
"lher": 1921,
"anos": 1922,
"talvez": 1923,
"estão": 1924,
"liv": 1925,
"outros": 1926,
"qualquer": 1927,
"gou": 1928,
"lí": 1929,
"tivesse": 1930,
"rado": 1931,
"precisa": 1932,
"mãe": 1933,
"dela": 1934,
"entra": 1935,
"maior": 1936,
"noite": 1937,
"tiva": 1938,
"pala": 1939,
"ração": 1940,
"deus": 1941,
"sas": 1942,
"inte": 1943,
"fei": 1944,
"palav": 1945,
"trás": 1946,
"cidade": 1947,
"lugar": 1948,
"vezes": 1949,
"encontra": 1950,
"tru": 1951,
"eci": 1952,
"ın": 1953,
"bir": 1954,
"yor": 1955,
"ek": 1956,
"dı": 1957,
"ey": 1958,
"tı": 1959,
"mı": 1960,
"iz": 1961,
"ır": 1962,
"gö": 1963,
"sı": 1964,
"bil": 1965,
"lı": 1966,
"üz": 1967,
"iç": 1968,
"iy": 1969,
"ım": 1970,
"uz": 1971,
"cak": 1972,
"iş": 1973,
"ını": 1974,
"iyor": 1975,
"baş": 1976,
"dü": 1977,
"değ": 1978,
"kar": 1979,
"ev": 1980,
"öy": 1981,
"bun": 1982,
"yap": 1983,
"sun": 1984,
"gör": 1985,
"yı": 1986,
"ki": 1987,
"ara": 1988,
"alı": 1989,
"onu": 1990,
"çı": 1991,
"şey": 1992,
"sın": 1993,
"kı": 1994,
"kad": 1995,
"ağ": 1996,
"değil": 1997,
"ük": 1998,
"çok": 1999,
"şı": 2000,
"ül": 2001,
"için": 2002,
"eye": 2003,
"oldu": 2004,
"mış": 2005,
"kal": 2006,
"mek": 2007,
"öyle": 2008,
"yordu": 2009,
"yüz": 2010,
"miş": 2011,
"mak": 2012,
"ola": 2013,
"yan": 2014,
"cek": 2015,
"yorum": 2016,
"bak": 2017,
"üm": 2018,
"ları": 2019,
"oğ": 2020,
"kadar": 2021,
"arı": 2022,
"ında": 2023,
"gün": 2024,
"yok": 2025,
"yer": 2026,
"dım": 2027,
"daha": 2028,
"ına": 2029,
"dim": 2030,
"bilir": 2031,
"iki": 2032,
"siz": 2033,
"diğ": 2034,
"bü": 2035,
"düş": 2036,
"üç": 2037,
"unu": 2038,
"aman": 2039,
"fak": 2040,
"ede": 2041,
"sonra": 2042,
"hiç": 2043,
"aki": 2044,
"ğı": 2045,
"bul": 2046,
"maz": 2047,
"anla": 2048,
"bura": 2049,
"geç": 2050,
"maya": 2051,
"konu": 2052,
"din": 2053,
"tek": 2054,
"zaman": 2055,
"eler": 2056,
"öz": 2057,
"dır": 2058,
"gibi": 2059,
"şa": 2060,
"leri": 2061,
"kim": 2062,
"ku": 2063,
"fakat": 2064,
"yar": 2065,
"göz": 2066,
"cı": 2067,
"yorsun": 2068,
"bek": 2069,
"inde": 2070,
"pek": 2071,
"bunu": 2072,
"lik": 2073,
"iler": 2074,
"edi": 2075,
"öl": 2076,
"sür": 2077,
"sır": 2078,
"çık": 2079,
"sıl": 2080,
"alar": 2081,
"kes": 2082,
"yak": 2083,
"çek": 2084,
"yıl": 2085,
"ecek": 2086,
"ız": 2087,
"git": 2088,
"kap": 2089,
"ama": 2090,
"ıl": 2091,
"ların": 2092,
"biz": 2093,
"tır": 2094,
"oy": 2095,
"ancak": 2096,
"doğ": 2097,
"bana": 2098,
"şim": 2099,
"başla": 2100,
"lü": 2101,
"madı": 2102,
"beni": 2103,
"yük": 2104,
"lık": 2105,
"beş": 2106,
"nasıl": 2107,
"tık": 2108,
"tür": 2109,
"daki": 2110,
"ceğ": 2111,
"zı": 2112,
"iyi": 2113,
"dok": 2114,
"benim": 2115,
"cağ": 2116,
"yen": 2117,
"şu": 2118,
"mez": 2119,
"düşün": 2120,
"kendi": 2121,
"şimdi": 2122,
"yol": 2123,
"yu": 2124,
"iste": 2125,
"sek": 2126,
"mam": 2127,
"söyle": 2128,
"dik": 2129,
"kur": 2130,
"olduğ": 2131,
"sını": 2132,
"biliyor": 2133,
"kan": 2134,
"yal": 2135,
"meye": 2136,
"muş": 2137,
"kaç": 2138,
"iye": 2139,
"tü": 2140,
"ef": 2141,
"tım": 2142,
"evet": 2143,
"yet": 2144,
"burada": 2145,
"tim": 2146,
"biraz": 2147,
"kor": 2148,
"doğru": 2149,
"inin": 2150,
"kız": 2151,
"diye": 2152,
"dör": 2153,
"etti": 2154,
"onun": 2155,
"isti": 2156,
"ği": 2157,
"sana": 2158,
"üş": 2159,
"arka": 2160,
"hayır": 2161,
"karşı": 2162,
"ile": 2163,
"hak": 2164,
"ıyor": 2165,
"neden": 2166,
"sev": 2167,
"sız": 2168,
"çocu": 2169,
"çalı": 2170,
"olur": 2171,
"bır": 2172,
"gir": 2173,
"ise": 2174,
"ih": 2175,
"kır": 2176,
"dön": 2177,
"böyle": 2178,
"seni": 2179,
"!\"": 2180,
"dört": 2181,
"söy": 2182,
"oş": 2183,
"musun": 2184,
"laş": 2185,
"ip": 2186,
"kay": 2187,
"hem": 2188,
"büyük": 2189,
"aç": 2190,
"bırak": 2191,
"misin": 2192,
"söz": 2193,
"değiş": 2194,
"ünü": 2195,
"gül": 2196,
"kö": 2197,
"karı": 2198,
"tamam": 2199,
"olu": 2200,
"yeni": 2201,
"lam": 2202,
"mıştı": 2203,
"yaş": 2204,
"iniz": 2205,
"kadın": 2206,
"bunun": 2207,
"mey": 2208,
"altı": 2209,
"yi": 2210,
"inden": 2211,
"senin": 2212,
"yat": 2213,
"top": 2214,
"isi": 2215,
"dün": 2216,
"hiçbir": 2217,
"yon": 2218,
"dın": 2219,
"tün": 2220,
"başka": 2221,
"hep": 2222,
"irmi": 2223,
"devam": 2224,
"olacak": 2225,
"artık": 2226,
"durum": 2227,
"imiz": 2228,
"üzel": 2229,
"lerini": 2230,
"sağ": 2231,
"gerek": 2232,
"yirmi": 2233,
"şek": 2234,
"bağ": 2235,
"lara": 2236,
"yür": 2237,
"ması": 2238,
"katı": 2239,
"dedi": 2240,
"gü": 2241,
"sorun": 2242,
"üne": 2243,
"mız": 2244,
"yapı": 2245,
"mil": 2246,
"ğını": 2247,
"tara": 2248,
"vardı": 2249,
"konuş": 2250,
"arak": 2251,
"larak": 2252,
"çocuk": 2253,
"bütün": 2254,
"ley": 2255,
"dür": 2256,
"güzel": 2257,
"ayı": 2258,
"yapa": 2259,
"nı": 2260,
"ayr": 2261,
"öne": 2262,
"yordum": 2263,
"ban": 2264,
"i̇ş": 2265,
"dum": 2266,
"yorlar": 2267,
"larını": 2268,
"çıkar": 2269,
"zan": 2270,
"seç": 2271,
"liyor": 2272,
"tak": 2273,
"şık": 2274,
"tekrar": 2275,
"aş": 2276,
"eş": 2277,
"mişti": 2278,
"kin": 2279,
"imi": 2280,
"eğ": 2281,
"gidi": 2282,
"leş": 2283,
"başladı": 2284,
"gide": 2285,
"otur": 2286,
"dde": 2287,
"ından": 2288,
"üzer": 2289,
"ının": 2290,
"nız": 2291,
"uy": 2292,
"yedi": 2293,
"kat": 2294,
"olarak": 2295,
"ladı": 2296,
"yalnız": 2297,
"bah": 2298,
"iyet": 2299,
"sak": 2300,
"açık": 2301,
"sında": 2302,
"...": 2303,
"insan": 2304,
"aynı": 2305,
"eder": 2306,
"istan": 2307,
"uzun": 2308,
"geri": 2309,
"erek": 2310,
"olan": 2311,
"gerçek": 2312,
"alan": 2313,
"dış": 2314,
"alık": 2315,
"fark": 2316,
"üst": 2317,
"sade": 2318,
"kiş": 2319,
"ldı": 2320,
"zor": 2321,
"etir": 2322,
"herkes": 2323,
"ömer": 2324,
"unda": 2325,
"haf": 2326,
"buna": 2327,
"ydı": 2328,
"peki": 2329,
"adam": 2330,
"haz": 2331,
"sına": 2332,
"kapı": 2333,
"görüş": 2334,
"sadece": 2335,
"aldı": 2336,
"geldi": 2337,
"rz": 2338,
"sz": 2339,
"cz": 2340,
"ię": 2341,
"dz": 2342,
"ał": 2343,
"się": 2344,
"rze": 2345,
"że": 2346,
"wy": 2347,
"rzy": 2348,
"ła": 2349,
"ło": 2350,
"ny": 2351,
"dzie": 2352,
"dzi": 2353,
"czy": 2354,
"cie": 2355,
"prze": 2356,
"dy": 2357,
"kie": 2358,
"ry": 2359,
"ją": 2360,
"ów": 2361,
"przy": 2362,
"mie": 2363,
"szy": 2364,
"cze": 2365,
"bie": 2366,
"cy": 2367,
"nia": 2368,
"ści": 2369,
"sze": 2370,
"jest": 2371,
"ży": 2372,
"ną": 2373,
"któ": 2374,
"ała": 2375,
"mnie": 2376,
"ły": 2377,
"cza": 2378,
"jak": 2379,
"roz": 2380,
"ró": 2381,
"zna": 2382,
"łu": 2383,
"ść": 2384,
"wia": 2385,
"wszy": 2386,
"spo": 2387,
"gdy": 2388,
"wał": 2389,
"wię": 2390,
"łem": 2391,
"ję": 2392,
"sk": 2393,
"rę": 2394,
"dob": 2395,
"już": 2396,
"bę": 2397,
"ałem": 2398,
"sza": 2399,
"pod": 2400,
"dla": 2401,
"pan": 2402,
"nę": 2403,
"może": 2404,
"śli": 2405,
"ało": 2406,
"lko": 2407,
"nych": 2408,
"powie": 2409,
"cię": 2410,
"tylko": 2411,
"naj": 2412,
"tego": 2413,
"ski": 2414,
"nego": 2415,
"wszyst": 2416,
"szcze": 2417,
"jed": 2418,
"jej": 2419,
"two": 2420,
"ąd": 2421,
"śmy": 2422,
"czę": 2423,
"wać": 2424,
"jego": 2425,
"ża": 2426,
"sy": 2427,
"praw": 2428,
"tym": 2429,
"który": 2430,
"ały": 2431,
"trze": 2432,
"niej": 2433,
"nym": 2434,
"gło": 2435,
"jąc": 2436,
"mówi": 2437,
"ska": 2438,
"nej": 2439,
"słu": 2440,
"wła": 2441,
"będzie": 2442,
"dę": 2443,
"pó": 2444,
"bez": 2445,
"nic": 2446,
"pła": 2447,
"ście": 2448,
"są": 2449,
"trzy": 2450,
"kiem": 2451,
"był": 2452,
"mog": 2453,
"robi": 2454,
"tam": 2455,
"mię": 2456,
"zy": 2457,
"pew": 2458,
"myś": 2459,
"przed": 2460,
"sko": 2461,
"które": 2462,
"lę": 2463,
"wsze": 2464,
"ąc": 2465,
"było": 2466,
"sobie": 2467,
"py": 2468,
"cią": 2469,
"jeszcze": 2470,
"tę": 2471,
"czas": 2472,
"szę": 2473,
"gł": 2474,
"kę": 2475,
"czu": 2476,
"przez": 2477,
"sło": 2478,
"wz": 2479,
"kto": 2480,
"ków": 2481,
"czo": 2482,
"liśmy": 2483,
"więc": 2484,
"rą": 2485,
"wó": 2486,
"rza": 2487,
"ności": 2488,
"wet": 2489,
"nął": 2490,
"śmie": 2491,
"nawet": 2492,
"musi": 2493,
"swo": 2494,
"tej": 2495,
"wą": 2496,
"wu": 2497,
"wią": 2498,
"niu": 2499,
"czą": 2500,
"dzo": 2501,
"skie": 2502,
"jeśli": 2503,
"czego": 2504,
"chy": 2505,
"dł": 2506,
"tych": 2507,
"bym": 2508,
"żo": 2509,
"eś": 2510,
"sią": 2511,
"kiedy": 2512,
"wró": 2513,
"dze": 2514,
"dro": 2515,
"rów": 2516,
"pani": 2517,
"kul": 2518,
"nad": 2519,
"chwi": 2520,
"nim": 2521,
"być": 2522,
"chodzi": 2523,
"nio": 2524,
"dobrze": 2525,
"teraz": 2526,
"wokul": 2527,
"coś": 2528,
"kł": 2529,
"pier": 2530,
"gdzie": 2531,
"dzy": 2532,
"pię": 2533,
"dź": 2534,
"ką": 2535,
"gó": 2536,
"zda": 2537,
"chce": 2538,
"stę": 2539,
"świa": 2540,
"wszystko": 2541,
"peł": 2542,
"wiem": 2543,
"wiel": 2544,
"każ": 2545,
"rzu": 2546,
"sły": 2547,
"jedna": 2548,
"myśl": 2549,
"mój": 2550,
"jestem": 2551,
"óż": 2552,
"miej": 2553,
"moż": 2554,
"kła": 2555,
"resz": 2556,
"dłu": 2557,
"stwo": 2558,
"nię": 2559,
"masz": 2560,
"żeby": 2561,
"niem": 2562,
"jakie": 2563,
"sty": 2564,
"nią": 2565,
"wej": 2566,
"oj": 2567,
"sła": 2568,
"ność": 2569,
"zło": 2570,
"szczę": 2571,
"lej": 2572,
"wego": 2573,
"cał": 2574,
"dział": 2575,
"kich": 2576,
"dza": 2577,
"dzię": 2578,
"oczy": 2579,
"zosta": 2580,
"czło": 2581,
"nam": 2582,
"kil": 2583,
"szu": 2584,
"wę": 2585,
"miał": 2586,
"strze": 2587,
"cej": 2588,
"ej": 2589,
"znaj": 2590,
"dać": 2591,
"miejs": 2592,
"kró": 2593,
"kry": 2594,
"bardzo": 2595,
"śnie": 2596,
"lą": 2597,
"gie": 2598,
"ciebie": 2599,
"dni": 2600,
"potrze": 2601,
"wokulski": 2602,
"uwa": 2603,
"umie": 2604,
"jednak": 2605,
"kra": 2606,
"wróci": 2607,
"człowie": 2608,
"czyć": 2609,
"była": 2610,
"żeli": 2611,
"mę": 2612,
"cę": 2613,
"zrobi": 2614,
"mogę": 2615,
"prowa": 2616,
"rem": 2617,
"niech": 2618,
"cznie": 2619,
"kro": 2620,
"tą": 2621,
"chci": 2622,
"bro": 2623,
"dzieć": 2624,
"szą": 2625,
"pad": 2626,
"trz": 2627,
"jem": 2628,
"tów": 2629,
"dru": 2630,
"taj": 2631,
"rzekł": 2632,
"niego": 2633,
"takie": 2634,
"wała": 2635,
"towa": 2636,
"kapła": 2637,
"widzi": 2638,
"podob": 2639,
"dzę": 2640,
"tał": 2641,
"stęp": 2642,
"bą": 2643,
"poko": 2644,
"wem": 2645,
"gę": 2646,
"aby": 2647,
"albo": 2648,
"spra": 2649,
"zno": 2650,
"smo": 2651,
"jesz": 2652,
"księ": 2653,
"jesteś": 2654,
"poz": 2655,
"nigdy": 2656,
"ksią": 2657,
"cóż": 2658,
"ws": 2659,
"pow": 2660,
"tka": 2661,
"świe": 2662,
"szka": 2663,
"samo": 2664,
"sł": 2665,
"rzę": 2666,
"nale": 2667,
"chcesz": 2668,
"nik": 2669,
"pę": 2670,
"chyba": 2671,
"ciąg": 2672,
"jący": 2673,
"woj": 2674,
"nasze": 2675,
"mniej": 2676,
"więcej": 2677,
"zwy": 2678,
"osta": 2679,
"waż": 2680,
"śmier": 2681,
"wier": 2682,
"dzą": 2683,
"zaś": 2684,
"gdyby": 2685,
"jaki": 2686,
"wol": 2687,
"win": 2688,
"dą": 2689,
"ścia": 2690,
"rozma": 2691,
"wal": 2692,
"panie": 2693,
"star": 2694,
"kaz": 2695,
"jeżeli": 2696,
"wra": 2697,
"koń": 2698,
"siebie": 2699,
"znowu": 2700,
"czem": 2701,
"stwa": 2702,
"isto": 2703,
"pół": 2704,
"dał": 2705,
"kobie": 2706,
"ałam": 2707,
"wych": 2708,
"cesa": 2709,
"nich": 2710,
"zawsze": 2711,
"dzić": 2712,
"też": 2713,
"lepie": 2714,
"proszę": 2715,
"kre": 2716,
"twa": 2717,
"łą": 2718,
"chu": 2719,
"cą": 2720,
"prz": 2721,
"łe": 2722,
"szedł": 2723,
"odpowie": 2724,
"myśli": 2725,
"świą": 2726,
"ź": 2727,
"ł": 2728,
"&": 2729,
"=": 2730,
"ă": 2731,
"đ": 2732,
"ţ": 2733,
"–": 2734,
"‘": 2735,
"ij": 2736,
"aa": 2737,
"een": 2738,
"het": 2739,
"aar": 2740,
"oor": 2741,
"ijn": 2742,
"dat": 2743,
"oe": 2744,
"ijk": 2745,
"aan": 2746,
"voor": 2747,
"iet": 2748,
"zijn": 2749,
"niet": 2750,
"oo": 2751,
"moet": 2752,
"heb": 2753,
"uit": 2754,
"wij": 2755,
"aat": 2756,
"lijk": 2757,
"sl": 2758,
"daar": 2759,
"deze": 2760,
"worden": 2761,
"moeten": 2762,
"onder": 2763,
"hebben": 2764,
"ook": 2765,
"ct": 2766,
"nog": 2767,
"aal": 2768,
"eer": 2769,
"bij": 2770,
"mijn": 2771,
"kom": 2772,
"atie": 2773,
"eft": 2774,
"kel": 2775,
"rij": 2776,
"heid": 2777,
"af": 2778,
"stel": 2779,
"maar": 2780,
"wee": 2781,
"heeft": 2782,
"waar": 2783,
"eren": 2784,
"wat": 2785,
"wil": 2786,
"aag": 2787,
"bet": 2788,
"hij": 2789,
"kun": 2790,
"uw": 2791,
"dt": 2792,
"door": 2793,
"tij": 2794,
"ond": 2795,
"geen": 2796,
"gev": 2797,
"veel": 2798,
"naar": 2799,
"aten": 2800,
"kunnen": 2801,
"echt": 2802,
"goe": 2803,
"twee": 2804,
"delijk": 2805,
"uur": 2806,
"toe": 2807,
"meer": 2808,
"onze": 2809,
"tijd": 2810,
"hoe": 2811,
"tot": 2812,
"zou": 2813,
"aak": 2814,
"amen": 2815,
"woor": 2816,
"wordt": 2817,
"gelijk": 2818,
"gaan": 2819,
"ker": 2820,
"eld": 2821,
"hou": 2822,
"zel": 2823,
"tegen": 2824,
"komen": 2825,
"werk": 2826,
"goed": 2827,
"zal": 2828,
"zij": 2829,
"slag": 2830,
"zien": 2831,
"echter": 2832,
"itie": 2833,
"tie": 2834,
"elijk": 2835,
"ische": 2836,
"belan": 2837,
"haar": 2838,
"vr": 2839,
"grijk": 2840,
"doen": 2841,
"land": 2842,
"belangrijk": 2843,
"open": 2844,
"ctie": 2845,
"zelf": 2846,
"mij": 2847,
"iteit": 2848,
"stem": 2849,
"mee": 2850,
"aren": 2851,
"dien": 2852,
"gaat": 2853,
"prob": 2854,
"moe": 2855,
"ullen": 2856,
"zich": 2857,
"daarom": 2858,
"orm": 2859,
"staat": 2860,
"zit": 2861,
"dui": 2862,
"dus": 2863,
"ds": 2864,
"verslag": 2865,
"kelijk": 2866,
"proble": 2867,
"schap": 2868,
"gd": 2869,
"hun": 2870,
"erd": 2871,
"zet": 2872,
"staan": 2873,
"maal": 2874,
"inder": 2875,
"eid": 2876,
"kken": 2877,
"ged": 2878,
"zullen": 2879,
"mensen": 2880,
"jaar": 2881,
"regel": 2882,
"ieder": 2883,
"volgen": 2884,
"geven": 2885,
"even": 2886,
"blij": 2887,
"ië": 2888,
"uwe": 2889,
"maken": 2890,
"oek": 2891,
"nieuwe": 2892,
"baar": 2893,
"andere": 2894,
"ruik": 2895,
"agen": 2896,
"ouw": 2897,
"willen": 2898,
"aakt": 2899,
"hoo": 2900,
"anden": 2901,
"lig": 2902,
"samen": 2903,
"zeer": 2904,
"duidelijk": 2905,
"antwoor": 2906,
"heel": 2907,
"punt": 2908,
"houden": 2909,
"vraag": 2910,
"gele": 2911,
"eens": 2912,
"besch": 2913,
"omen": 2914,
"erg": 2915,
"doel": 2916,
"dag": 2917,
"uren": 2918,
"ings": 2919,
"oren": 2920,
"delen": 2921,
"steun": 2922,
"innen": 2923,
"pol": 2924,
"oon": 2925,
"sn": 2926,
"zonder": 2927,
"nodig": 2928,
"alleen": 2929,
"mid": 2930,
"ragen": 2931,
"iets": 2932,
"versch": 2933,
"gebruik": 2934,
"rouw": 2935,
"stellen": 2936,
"menten": 2937,
"eerste": 2938,
"laat": 2939,
"groot": 2940,
"ood": 2941,
"toch": 2942,
"laten": 2943,
"aard": 2944,
"sle": 2945,
"deel": 2946,
"plaat": 2947,
"ree": 2948,
"betre": 2949,
"lid": 2950,
"uiten": 2951,
"racht": 2952,
"beleid": 2953,
"stie": 2954,
"staten": 2955,
"ggen": 2956,
"reken": 2957,
"alen": 2958,
"ming": 2959,
"mogelijk": 2960,
"grote": 2961,
"altijd": 2962,
"enkel": 2963,
"wik": 2964,
"politie": 2965,
"elk": 2966,
"handel": 2967,
"kwe": 2968,
"maat": 2969,
"elen": 2970,
"vrij": 2971,
"jes": 2972,
"aam": 2973,
"huis": 2974,
"weer": 2975,
"lidstaten": 2976,
"king": 2977,
"kle": 2978,
"bed": 2979,
"geval": 2980,
"wikkel": 2981,
"kwestie": 2982,
"stee": 2983,
"hel": 2984,
"komst": 2985,
"iden": 2986,
"eerd": 2987,
"tweede": 2988,
"probleem": 2989,
"ussen": 2990,
"snel": 2991,
"tig": 2992,
"ult": 2993,
"nemen": 2994,
"commis": 2995,
"verschil": 2996,
"zoek": 2997,
"krij": 2998,
"graag": 2999,
"denk": 3000,
"landen": 3001,
"reden": 3002,
"besl": 3003,
"oeg": 3004,
"beter": 3005,
"heden": 3006,
"mag": 3007,
"boven": 3008,
"cont": 3009,
"fd": 3010,
"hele": 3011,
"vier": 3012,
"gez": 3013,
"kw": 3014,
"aas": 3015,
"ontwikkel": 3016,
"drie": 3017,
"vaak": 3018,
"plaats": 3019,
"gang": 3020,
"ijf": 3021,
"natuur": 3022,
"tussen": 3023,
"bat": 3024,
"komt": 3025,
"wacht": 3026,
"aad": 3027,
"achter": 3028,
"gebie": 3029,
"verk": 3030,
"ligt": 3031,
"nieuw": 3032,
"vand": 3033,
"ý": 3034,
"ď": 3035,
"ě": 3036,
"ř": 3037,
"ť": 3038,
"ů": 3039,
"„": 3040,
"ní": 3041,
"ně": 3042,
"ře": 3043,
"ná": 3044,
"vě": 3045,
"vá": 3046,
"rá": 3047,
"vy": 3048,
"mě": 3049,
"ři": 3050,
"ří": 3051,
"že": 3052,
"jí": 3053,
"vý": 3054,
"ji": 3055,
"dě": 3056,
"če": 3057,
"tě": 3058,
"ky": 3059,
"še": 3060,
"ké": 3061,
"ší": 3062,
"pře": 3063,
"ví": 3064,
"ný": 3065,
"ži": 3066,
"má": 3067,
"cí": 3068,
"zá": 3069,
"ské": 3070,
"dá": 3071,
"byl": 3072,
"tí": 3073,
"pří": 3074,
"při": 3075,
"či": 3076,
"vní": 3077,
"ča": 3078,
"dí": 3079,
"dní": 3080,
"ká": 3081,
"nou": 3082,
"vět": 3083,
"pě": 3084,
"kou": 3085,
"ých": 3086,
"bě": 3087,
"prá": 3088,
"jako": 3089,
"ží": 3090,
"zí": 3091,
"jsou": 3092,
"jsem": 3093,
"lní": 3094,
"cké": 3095,
"vat": 3096,
"před": 3097,
"hla": 3098,
"stá": 3099,
"čí": 3100,
"ši": 3101,
"kla": 3102,
"ště": 3103,
"lou": 3104,
"mů": 3105,
"chá": 3106,
"pů": 3107,
"také": 3108,
"dů": 3109,
"nost": 3110,
"tře": 3111,
"sku": 3112,
"vše": 3113,
"tní": 3114,
"byla": 3115,
"ční": 3116,
"jeho": 3117,
"bý": 3118,
"vání": 3119,
"ných": 3120,
"tři": 3121,
"vz": 3122,
"stře": 3123,
"dva": 3124,
"hle": 3125,
"čá": 3126,
"nosti": 3127,
"vš": 3128,
"hra": 3129,
"jen": 3130,
"slo": 3131,
"však": 3132,
"kdy": 3133,
"bylo": 3134,
"bude": 3135,
"jší": 3136,
"vých": 3137,
"ním": 3138,
"sm": 3139,
"koli": 3140,
"rů": 3141,
"může": 3142,
"není": 3143,
"hod": 3144,
"bí": 3145,
"tý": 3146,
"stě": 3147,
"uje": 3148,
"sá": 3149,
"pět": 3150,
"krá": 3151,
"tom": 3152,
"ství": 3153,
"vně": 3154,
"sed": 3155,
"své": 3156,
"pí": 3157,
"musí": 3158,
"už": 3159,
"tím": 3160,
"jící": 3161,
"jedno": 3162,
"čas": 3163,
"čty": 3164,
"ský": 3165,
"evro": 3166,
"toho": 3167,
"hy": 3168,
"kter": 3169,
"rní": 3170,
"stí": 3171,
"svě": 3172,
"pak": 3173,
"všech": 3174,
"ků": 3175,
"ng": 3176,
"ád": 3177,
"chází": 3178,
"být": 3179,
"první": 3180,
"mno": 3181,
"ského": 3182,
"pá": 3183,
"nebo": 3184,
"kem": 3185,
"sla": 3186,
"ného": 3187,
"zde": 3188,
"další": 3189,
"řa": 3190,
"čtyři": 3191,
"hrá": 3192,
"druh": 3193,
"lně": 3194,
"vla": 3195,
"ských": 3196,
"ško": 3197,
"půso": 3198,
"proto": 3199,
"vů": 3200,
"ská": 3201,
"šest": 3202,
"dně": 3203,
"ještě": 3204,
"mezi": 3205,
"několi": 3206,
"již": 3207,
"čně": 3208,
"slu": 3209,
"zná": 3210,
"sedm": 3211,
"vlá": 3212,
"osm": 3213,
"byly": 3214,
"vám": 3215,
"cký": 3216,
"tech": 3217,
"ději": 3218,
"velmi": 3219,
"leži": 3220,
"vala": 3221,
"lý": 3222,
"tvo": 3223,
"spole": 3224,
"stup": 3225,
"mož": 3226,
"evrop": 3227,
"stal": 3228,
"jde": 3229,
"rodi": 3230,
"její": 3231,
"poli": 3232,
"devět": 3233,
"sme": 3234,
"až": 3235,
"této": 3236,
"tento": 3237,
"kaž": 3238,
"nula": 3239,
"bych": 3240,
"moc": 3241,
"stou": 3242,
"kdo": 3243,
"zd": 3244,
"praco": 3245,
"tomu": 3246,
"ným": 3247,
"živo": 3248,
"zem": 3249,
"násle": 3250,
"sky": 3251,
"jich": 3252,
"měl": 3253,
"děla": 3254,
"jsme": 3255,
"nice": 3256,
"stej": 3257,
"stní": 3258,
"náro": 3259,
"nit": 3260,
"později": 3261,
"tako": 3262,
"nce": 3263,
"čer": 3264,
"ším": 3265,
"něco": 3266,
"vál": 3267,
"řej": 3268,
"krát": 3269,
"ální": 3270,
"asi": 3271,
"které": 3272,
"stav": 3273,
"mají": 3274,
"mys": 3275,
"době": 3276,
"sně": 3277,
"zku": 3278,
"tů": 3279,
"chod": 3280,
"spě": 3281,
"jejich": 3282,
"součas": 3283,
"vali": 3284,
"kte": 3285,
"prů": 3286,
"zení": 3287,
"pat": 3288,
"potře": 3289,
"dnes": 3290,
"zemí": 3291,
"znam": 3292,
"mám": 3293,
"tedy": 3294,
"hlavní": 3295,
"použí": 3296,
"bní": 3297,
"vede": 3298,
"lep": 3299,
"jek": 3300,
"prav": 3301,
"politi": 3302,
"dne": 3303,
"čení": 3304,
"než": 3305,
"děl": 3306,
"čo": 3307,
"cích": 3308,
"sté": 3309,
"dlou": 3310,
"několik": 3311,
"vyu": 3312,
"ckých": 3313,
"nové": 3314,
"čin": 3315,
"dělá": 3316,
"ký": 3317,
"obla": 3318,
"podle": 3319,
"důleži": 3320,
"poku": 3321,
"kone": 3322,
"dý": 3323,
"dvě": 3324,
"žád": 3325,
"nout": 3326,
"tku": 3327,
"tvr": 3328,
"ckého": 3329,
"rov": 3330,
"tele": 3331,
"psa": 3332,
"svět": 3333,
"tivní": 3334,
"dosta": 3335,
"šel": 3336,
"druhé": 3337,
"skou": 3338,
"žo": 3339,
"jedná": 3340,
"význam": 3341,
"problé": 3342,
"publi": 3343,
"ván": 3344,
"odpo": 3345,
"podpo": 3346,
"dle": 3347,
"jaké": 3348,
"šení": 3349,
"vím": 3350,
"během": 3351,
"nachází": 3352,
"slou": 3353,
"pouze": 3354,
"otá": 3355,
"plo": 3356,
"tové": 3357,
"větši": 3358,
"komi": 3359,
"vají": 3360,
"tyto": 3361,
"zápa": 3362,
"změ": 3363,
"moh": 3364,
"více": 3365,
"společ": 3366,
"auto": 3367,
"proti": 3368,
"dět": 3369,
"cháze": 3370,
"žel": 3371,
"«": 3372,
"»": 3373,
"а": 3374,
"б": 3375,
"в": 3376,
"г": 3377,
"д": 3378,
"е": 3379,
"ж": 3380,
"з": 3381,
"и": 3382,
"й": 3383,
"к": 3384,
"л": 3385,
"м": 3386,
"н": 3387,
"о": 3388,
"п": 3389,
"р": 3390,
"с": 3391,
"т": 3392,
"у": 3393,
"ф": 3394,
"х": 3395,
"ц": 3396,
"ч": 3397,
"ш": 3398,
"щ": 3399,
"ъ": 3400,
"ы": 3401,
"ь": 3402,
"э": 3403,
"ю": 3404,
"я": 3405,
"ё": 3406,
"‑": 3407,
"−": 3408,
"ст": 3409,
"ен": 3410,
"но": 3411,
"на": 3412,
"пр": 3413,
"то": 3414,
"по": 3415,
"ра": 3416,
"го": 3417,
"ко": 3418,
"не": 3419,
"во": 3420,
"ва": 3421,
"ет": 3422,
"ер": 3423,
"ни": 3424,
"ел": 3425,
"ит": 3426,
"ны": 3427,
"за": 3428,
"ро": 3429,
"ени": 3430,
"ка": 3431,
"ли": 3432,
"ем": 3433,
"да": 3434,
"об": 3435,
"ла": 3436,
"до": 3437,
"ся": 3438,
"ть": 3439,
"от": 3440,
"ло": 3441,
"ль": 3442,
"ед": 3443,
"со": 3444,
"ми": 3445,
"ре": 3446,
"мо": 3447,
"ци": 3448,
"про": 3449,
"та": 3450,
"это": 3451,
"ки": 3452,
"ру": 3453,
"при": 3454,
"ти": 3455,
"се": 3456,
"ста": 3457,
"вы": 3458,
"мы": 3459,
"ви": 3460,
"бы": 3461,
"ма": 3462,
"ес": 3463,
"ля": 3464,
"сти": 3465,
"ле": 3466,
"что": 3467,
"ме": 3468,
"ри": 3469,
"ча": 3470,
"од": 3471,
"ей": 3472,
"ель": 3473,
"ения": 3474,
"га": 3475,
"ну": 3476,
"си": 3477,
"па": 3478,
"раз": 3479,
"бо": 3480,
"сто": 3481,
"су": 3482,
"са": 3483,
"ду": 3484,
"его": 3485,
"ест": 3486,
"ин": 3487,
"ить": 3488,
"из": 3489,
"же": 3490,
"му": 3491,
"пер": 3492,
"под": 3493,
"ение": 3494,
"сь": 3495,
"ку": 3496,
"пред": 3497,
"ного": 3498,
"ных": 3499,
"вер": 3500,
"те": 3501,
"ной": 3502,
"ции": 3503,
"де": 3504,
"ры": 3505,
"дел": 3506,
"лю": 3507,
"ве": 3508,
"он": 3509,
"мен": 3510,
"ги": 3511,
"ня": 3512,
"бу": 3513,
"пра": 3514,
"все": 3515,
"ется": 3516,
"сть": 3517,
"жа": 3518,
"дол": 3519,
"жи": 3520,
"бе": 3521,
"кон": 3522,
"сл": 3523,
"ши": 3524,
"ди": 3525,
"ств": 3526,
"ско": 3527,
"ные": 3528,
"чи": 3529,
"ют": 3530,
"дер": 3531,
"стра": 3532,
"ты": 3533,
"ход": 3534,
"щи": 3535,
"зо": 3536,
"зна": 3537,
"ности": 3538,
"чес": 3539,
"вля": 3540,
"вать": 3541,
"ор": 3542,
"пол": 3543,
"вет": 3544,
"так": 3545,
"ша": 3546,
"ту": 3547,
"сво": 3548,
"пре": 3549,
"она": 3550,
"итель": 3551,
"ный": 3552,
"сло": 3553,
"как": 3554,
"вл": 3555,
"ность": 3556,
"хо": 3557,
"мож": 3558,
"пе": 3559,
"для": 3560,
"ния": 3561,
"ное": 3562,
"рас": 3563,
"долж": 3564,
"дар": 3565,
"тель": 3566,
"ска": 3567,
"пу": 3568,
"ство": 3569,
"кото": 3570,
"раб": 3571,
"ее": 3572,
"род": 3573,
"эти": 3574,
"соб": 3575,
"ору": 3576,
"жен": 3577,
"ным": 3578,
"ити": 3579,
"ние": 3580,
"ком": 3581,
"дет": 3582,
"сту": 3583,
"гу": 3584,
"пи": 3585,
"меж": 3586,
"ению": 3587,
"тер": 3588,
"работ": 3589,
"воз": 3590,
"ция": 3591,
"кой": 3592,
"щест": 3593,
"гра": 3594,
"зи": 3595,
"ря": 3596,
"между": 3597,
"ства": 3598,
"вс": 3599,
"ело": 3600,
"ше": 3601,
"мер": 3602,
"ба": 3603,
"зы": 3604,
"лу": 3605,
"аль": 3606,
"дей": 3607,
"гла": 3608,
"народ": 3609,
"кти": 3610,
"предста": 3611,
"лся": 3612,
"явля": 3613,
"ски": 3614,
"нов": 3615,
"един": 3616,
"ров": 3617,
"ис": 3618,
"нима": 3619,
"рем": 3620,
"ходи": 3621,
"также": 3622,
"дру": 3623,
"ать": 3624,
"след": 3625,
"гово": 3626,
"ная": 3627,
"ющи": 3628,
"ень": 3629,
"которы": 3630,
"хот": 3631,
"ву": 3632,
"их": 3633,
"ему": 3634,
"чит": 3635,
"важ": 3636,
"орга": 3637,
"чески": 3638,
"ще": 3639,
"ке": 3640,
"ха": 3641,
"пос": 3642,
"том": 3643,
"боль": 3644,
"мне": 3645,
"пас": 3646,
"объ": 3647,
"прав": 3648,
"конф": 3649,
"слу": 3650,
"поддер": 3651,
"стви": 3652,
"наш": 3653,
"лько": 3654,
"стоя": 3655,
"ную": 3656,
"лем": 3657,
"енных": 3658,
"кра": 3659,
"ды": 3660,
"международ": 3661,
"гда": 3662,
"необ": 3663,
"госу": 3664,
"ству": 3665,
"ении": 3666,
"государ": 3667,
"кто": 3668,
"им": 3669,
"чест": 3670,
"рет": 3671,
"вопро": 3672,
"лен": 3673,
"ели": 3674,
"рова": 3675,
"ций": 3676,
"нам": 3677,
"этой": 3678,
"жения": 3679,
"необходи": 3680,
"меня": 3681,
"было": 3682,
"сили": 3683,
"фи": 3684,
"вя": 3685,
"шь": 3686,
"этого": 3687,
"они": 3688,
"органи": 3689,
"безо": 3690,
"проб": 3691,
"име": 3692,
"реш": 3693,
"би": 3694,
"безопас": 3695,
"ются": 3696,
"оста": 3697,
"енно": 3698,
"год": 3699,
"ела": 3700,
"представ": 3701,
"ться": 3702,
"слово": 3703,
"организа": 3704,
"должны": 3705,
"этом": 3706,
"бла": 3707,
"че": 3708,
"чу": 3709,
"благо": 3710,
"этому": 3711,
"врем": 3712,
"спе": 3713,
"ном": 3714,
"ений": 3715,
"спо": 3716,
"нас": 3717,
"нет": 3718,
"зу": 3719,
"вед": 3720,
"еще": 3721,
"сказа": 3722,
"сей": 3723,
"ерен": 3724,
"дан": 3725,
"сам": 3726,
"еля": 3727,
"ран": 3728,
"зыва": 3729,
"является": 3730,
"будет": 3731,
"ктив": 3732,
"тре": 3733,
"деле": 3734,
"мот": 3735,
"конферен": 3736,
"лась": 3737,
"час": 3738,
"сторо": 3739,
"кого": 3740,
"ез": 3741,
"ней": 3742,
"ос": 3743,
"лись": 3744,
"разору": 3745,
"пере": 3746,
"сси": 3747,
"ными": 3748,
"проц": 3749,
"голо": 3750,
"чело": 3751,
"боле": 3752,
"челове": 3753,
"сер": 3754,
"пл": 3755,
"чет": 3756,
"стран": 3757,
"пя": 3758,
"был": 3759,
"кла": 3760,
"тов": 3761,
"жд": 3762,
"дела": 3763,
"ера": 3764,
"уже": 3765,
"совет": 3766,
"ген": 3767,
"безопасности": 3768,
"ца": 3769,
"седа": 3770,
"поз": 3771,
"ответ": 3772,
"проблем": 3773,
"нако": 3774,
"тем": 3775,
"доста": 3776,
"пы": 3777,
"ща": 3778,
"вой": 3779,
"сущест": 3780,
"необходимо": 3781,
"быть": 3782,
"может": 3783,
"дем": 3784,
"чтобы": 3785,
"ек": 3786,
"чер": 3787,
"усили": 3788,
"рес": 3789,
"руд": 3790,
"единенных": 3791,
"доб": 3792,
"дости": 3793,
"ствен": 3794,
"ядер": 3795,
"годня": 3796,
"каза": 3797,
"сегодня": 3798,
"сейчас": 3799,
"только": 3800,
"вод": 3801,
"есь": 3802,
"много": 3803,
"буду": 3804,
"ев": 3805,
"есть": 3806,
"три": 3807,
"общест": 3808,
"явл": 3809,
"высту": 3810,
"ред": 3811,
"счит": 3812,
"сит": 3813,
"делега": 3814,
"лож": 3815,
"этот": 3816,
"фор": 3817,
"клю": 3818,
"возмож": 3819,
"вания": 3820,
"бли": 3821,
"или": 3822,
"вз": 3823,
"наций": 3824,
"ского": 3825,
"приня": 3826,
"пла": 3827,
"оч": 3828,
"иться": 3829,
"сте": 3830,
"наши": 3831,
"которые": 3832,
"ар": 3833,
"имеет": 3834,
"сот": 3835,
"знач": 3836,
"перь": 3837,
"следу": 3838,
"ены": 3839,
"таки": 3840,
"объединенных": 3841,
"стро": 3842,
"теперь": 3843,
"бле": 3844,
"благодар": 3845,
"разв": 3846,
"ан": 3847,
"жива": 3848,
"очень": 3849,
"ят": 3850,
"без": 3851,
"обес": 3852,
"гро": 3853,
"лось": 3854,
"сы": 3855,
"организации": 3856,
"член": 3857,
"того": 3858,
"ональ": 3859,
"жда": 3860,
"всех": 3861,
"свя": 3862,
"более": 3863,
"сов": 3864,
"когда": 3865,
"вот": 3866,
"кре": 3867,
"кры": 3868,
"поэтому": 3869,
"воль": 3870,
"ой": 3871,
"генера": 3872,
"чем": 3873,
"лы": 3874,
"полити": 3875,
"вен": 3876,
"конференции": 3877,
"процес": 3878,
"бя": 3879,
"ите": 3880,
"отно": 3881,
"развити": 3882,
"аф": 3883,
"ющ": 3884,
"вно": 3885,
"мир": 3886,
"нии": 3887,
"кая": 3888,
"ас": 3889,
"ительно": 3890,
"вто": 3891,
"ением": 3892,
"генераль": 3893,
"прот": 3894,
"всем": 3895,
"самбле": 3896,
"ассамбле": 3897,
"ом": 3898,
"зд": 3899,
"смот": 3900,
"реги": 3901,
"чего": 3902,
"однако": 3903,
"усилия": 3904,
"действи": 3905,
"чно": 3906,
"уча": 3907,
"образ": 3908,
"вос": 3909,
"эта": 3910,
"перего": 3911,
"говор": 3912,
"вам": 3913,
"моло": 3914,
"время": 3915,
"дь": 3916,
"хотел": 3917,
"гру": 3918,
"заявл": 3919,
"предоста": 3920,
"поль": 3921,
"нее": 3922,
"резо": 3923,
"перегово": 3924,
"резолю": 3925,
"крет": 3926,
"поддерж": 3927,
"обеспе": 3928,
"него": 3929,
"представит": 3930,
"наде": 3931,
"кри": 3932,
"чь": 3933,
"проек": 3934,
"лет": 3935,
"други": 3936,
"_": 3937,
"،": 3938,
"؛": 3939,
"؟": 3940,
"ء": 3941,
"آ": 3942,
"أ": 3943,
"ؤ": 3944,
"إ": 3945,
"ئ": 3946,
"ا": 3947,
"ب": 3948,
"ة": 3949,
"ت": 3950,
"ث": 3951,
"ج": 3952,
"ح": 3953,
"خ": 3954,
"د": 3955,
"ذ": 3956,
"ر": 3957,
"ز": 3958,
"س": 3959,
"ش": 3960,
"ص": 3961,
"ض": 3962,
"ط": 3963,
"ظ": 3964,
"ع": 3965,
"غ": 3966,
"ـ": 3967,
"ف": 3968,
"ق": 3969,
"ك": 3970,
"ل": 3971,
"م": 3972,
"ن": 3973,
"ه": 3974,
"و": 3975,
"ى": 3976,
"ي": 3977,
"ً": 3978,
"ٌ": 3979,
"ٍ": 3980,
"َ": 3981,
"ُ": 3982,
"ِ": 3983,
"ّ": 3984,
"ْ": 3985,
"ٰ": 3986,
"چ": 3987,
"ڨ": 3988,
"ک": 3989,
"ھ": 3990,
"ی": 3991,
"ۖ": 3992,
"ۗ": 3993,
"ۘ": 3994,
"ۚ": 3995,
"ۛ": 3996,
"—": 3997,
"☭": 3998,
"ﺃ": 3999,
"ﻻ": 4000,
"ال": 4001,
"َا": 4002,
"وَ": 4003,
"َّ": 4004,
"ِي": 4005,
"أَ": 4006,
"لَ": 4007,
"نَ": 4008,
"الْ": 4009,
"هُ": 4010,
"ُو": 4011,
"ما": 4012,
"نْ": 4013,
"من": 4014,
"عَ": 4015,
"نا": 4016,
"لا": 4017,
"مَ": 4018,
"تَ": 4019,
"فَ": 4020,
"أن": 4021,
"لي": 4022,
"مِ": 4023,
"ان": 4024,
"في": 4025,
"رَ": 4026,
"يَ": 4027,
"هِ": 4028,
"مْ": 4029,
"قَ": 4030,
"بِ": 4031,
"لى": 4032,
"ين": 4033,
"إِ": 4034,
"لِ": 4035,
"وا": 4036,
"كَ": 4037,
"ها": 4038,
"ًا": 4039,
"مُ": 4040,
"ون": 4041,
"الم": 4042,
"بَ": 4043,
"يا": 4044,
"ذا": 4045,
"سا": 4046,
"الل": 4047,
"مي": 4048,
"يْ": 4049,
"را": 4050,
"ري": 4051,
"لك": 4052,
"مَا": 4053,
"نَّ": 4054,
"لم": 4055,
"إن": 4056,
"ست": 4057,
"وم": 4058,
"َّا": 4059,
"لَا": 4060,
"هم": 4061,
"ِّ": 4062,
"كُ": 4063,
"كان": 4064,
"سَ": 4065,
"با": 4066,
"دي": 4067,
"حَ": 4068,
"عْ": 4069,
"بي": 4070,
"الأ": 4071,
"ول": 4072,
"فِي": 4073,
"رِ": 4074,
"دا": 4075,
"مِنْ": 4076,
"ُونَ": 4077,
"وْ": 4078,
"هَا": 4079,
"ُّ": 4080,
"الس": 4081,
"الَ": 4082,
"ني": 4083,
"لْ": 4084,
"تُ": 4085,
"هل": 4086,
"رة": 4087,
"دَ": 4088,
"سْ": 4089,
"تِ": 4090,
"نَا": 4091,
"رْ": 4092,
"اللَّ": 4093,
"سامي": 4094,
"كن": 4095,
"كل": 4096,
"هَ": 4097,
"عَلَ": 4098,
"على": 4099,
"مع": 4100,
"إلى": 4101,
"قد": 4102,
"الر": 4103,
"ُوا": 4104,
"ير": 4105,
"عن": 4106,
"يُ": 4107,
"نِ": 4108,
"بْ": 4109,
"الح": 4110,
"هُمْ": 4111,
"قا": 4112,
"ذه": 4113,
"الت": 4114,
"ِينَ": 4115,
"جَ": 4116,
"هذا": 4117,
"عد": 4118,
"الع": 4119,
"دْ": 4120,
"قَالَ": 4121,
"رُ": 4122,
"يم": 4123,
"ية": 4124,
"نُ": 4125,
"خَ": 4126,
"رب": 4127,
"الك": 4128,
"وَا": 4129,
"أنا": 4130,
"ةِ": 4131,
"الن": 4132,
"حد": 4133,
"عِ": 4134,
"تا": 4135,
"هو": 4136,
"فا": 4137,
"عا": 4138,
"الش": 4139,
"لُ": 4140,
"يت": 4141,
"ذَا": 4142,
"يع": 4143,
"الذ": 4144,
"حْ": 4145,
"الص": 4146,
"إِنَّ": 4147,
"جا": 4148,
"علي": 4149,
"كَا": 4150,
"بُ": 4151,
"تع": 4152,
"وق": 4153,
"مل": 4154,
"لَّ": 4155,
"يد": 4156,
"أخ": 4157,
"رف": 4158,
"تي": 4159,
"الِ": 4160,
"ّا": 4161,
"ذلك": 4162,
"أَنْ": 4163,
"سِ": 4164,
"توم": 4165,
"مر": 4166,
"مَنْ": 4167,
"بل": 4168,
"الق": 4169,
"الله": 4170,
"ِيَ": 4171,
"كم": 4172,
"ذَ": 4173,
"عل": 4174,
"حب": 4175,
"سي": 4176,
"عُ": 4177,
"الج": 4178,
"الد": 4179,
"شَ": 4180,
"تك": 4181,
"فْ": 4182,
"صَ": 4183,
"لل": 4184,
"دِ": 4185,
"بر": 4186,
"فِ": 4187,
"ته": 4188,
"أع": 4189,
"تْ": 4190,
"قْ": 4191,
"الْأَ": 4192,
"ئِ": 4193,
"عَنْ": 4194,
"ور": 4195,
"حا": 4196,
"الَّ": 4197,
"مت": 4198,
"فر": 4199,
"دُ": 4200,
"هنا": 4201,
"وَأَ": 4202,
"تب": 4203,
"ةُ": 4204,
"أي": 4205,
"سب": 4206,
"ريد": 4207,
"وج": 4208,
"كُمْ": 4209,
"حِ": 4210,
"كْ": 4211,
"در": 4212,
"َاء": 4213,
"هذه": 4214,
"الط": 4215,
"الْمُ": 4216,
"دة": 4217,
"قل": 4218,
"غَ": 4219,
"يوم": 4220,
"الَّذ": 4221,
"كر": 4222,
"تر": 4223,
"كِ": 4224,
"كي": 4225,
"عَلَى": 4226,
"رَب": 4227,
"عة": 4228,
"قُ": 4229,
"جْ": 4230,
"فض": 4231,
"لة": 4232,
"هْ": 4233,
"رَا": 4234,
"وَلَ": 4235,
"الْمَ": 4236,
"أَنَّ": 4237,
"يَا": 4238,
"أُ": 4239,
"شي": 4240,
"اللَّهُ": 4241,
"لَى": 4242,
"قِ": 4243,
"أت": 4244,
"عَلَيْ": 4245,
"اللَّهِ": 4246,
"الب": 4247,
"ضَ": 4248,
"ةً": 4249,
"قي": 4250,
"ار": 4251,
"بد": 4252,
"خْ": 4253,
"سْتَ": 4254,
"طَ": 4255,
"قَدْ": 4256,
"ذهب": 4257,
"أم": 4258,
"ماذا": 4259,
"وَإِ": 4260,
"ةٌ": 4261,
"ونَ": 4262,
"ليلى": 4263,
"ولا": 4264,
"حُ": 4265,
"هي": 4266,
"صل": 4267,
"الخ": 4268,
"ود": 4269,
"ليس": 4270,
"لدي": 4271,
"قال": 4272,
"كَانَ": 4273,
"مَّ": 4274,
"حي": 4275,
"تم": 4276,
"لن": 4277,
"وَلَا": 4278,
"بع": 4279,
"يمكن": 4280,
"سُ": 4281,
"ةَ": 4282,
"حت": 4283,
"رًا": 4284,
"كا": 4285,
"شا": 4286,
"هِمْ": 4287,
"لَهُ": 4288,
"زَ": 4289,
"داً": 4290,
"مس": 4291,
"كث": 4292,
"الْعَ": 4293,
"جِ": 4294,
"صْ": 4295,
"فَا": 4296,
"له": 4297,
"وي": 4298,
"عَا": 4299,
"هُوَ": 4300,
"بِي": 4301,
"بَا": 4302,
"أس": 4303,
"ثَ": 4304,
"لِي": 4305,
"رض": 4306,
"الرَّ": 4307,
"لِكَ": 4308,
"تَّ": 4309,
"فُ": 4310,
"قة": 4311,
"فعل": 4312,
"مِن": 4313,
"الآ": 4314,
"ثُ": 4315,
"سم": 4316,
"مَّا": 4317,
"بِهِ": 4318,
"تق": 4319,
"خر": 4320,
"لقد": 4321,
"خل": 4322,
"شر": 4323,
"أنت": 4324,
"لَّا": 4325,
"سن": 4326,
"السَّ": 4327,
"الذي": 4328,
"سَا": 4329,
"وما": 4330,
"زل": 4331,
"وب": 4332,
"أْ": 4333,
"إذا": 4334,
"رِي": 4335,
"حة": 4336,
"نِي": 4337,
"الْحَ": 4338,
"وَقَالَ": 4339,
"به": 4340,
"ةٍ": 4341,
"سأ": 4342,
"رٌ": 4343,
"بال": 4344,
"مة": 4345,
"شْ": 4346,
"وت": 4347,
"عند": 4348,
"فس": 4349,
"بَعْ": 4350,
"هر": 4351,
"قط": 4352,
"أح": 4353,
"إنه": 4354,
"وع": 4355,
"فت": 4356,
"غا": 4357,
"هناك": 4358,
"بت": 4359,
"مِنَ": 4360,
"سر": 4361,
"ذَلِكَ": 4362,
"رس": 4363,
"حدث": 4364,
"غْ": 4365,
"ِّي": 4366,
"الإ": 4367,
"وَيَ": 4368,
"جل": 4369,
"است": 4370,
"قِي": 4371,
"عب": 4372,
"وس": 4373,
"يش": 4374,
"الَّذِينَ": 4375,
"تاب": 4376,
"دِي": 4377,
"جب": 4378,
"كون": 4379,
"بن": 4380,
"الث": 4381,
"لَيْ": 4382,
"بعد": 4383,
"وَالْ": 4384,
"فَأَ": 4385,
"عم": 4386,
"هُم": 4387,
"تن": 4388,
"ذْ": 4389,
"أص": 4390,
"أين": 4391,
"رَبِّ": 4392,
"الذين": 4393,
"إِن": 4394,
"بين": 4395,
"جُ": 4396,
"عَلَيْهِ": 4397,
"حَا": 4398,
"لو": 4399,
"ستط": 4400,
"ظر": 4401,
"لَمْ": 4402,
"ءِ": 4403,
"كُل": 4404,
"طل": 4405,
"تَا": 4406,
"ضُ": 4407,
"كنت": 4408,
"لًا": 4409,
"مٌ": 4410,
"قبل": 4411,
"ــ": 4412,
"ذِ": 4413,
"قَوْ": 4414,
"صِ": 4415,
"مًا": 4416,
"كانت": 4417,
"صا": 4418,
"يق": 4419,
"الف": 4420,
"النا": 4421,
"مٍ": 4422,
"إِنْ": 4423,
"النَّ": 4424,
"جد": 4425,
"وَمَا": 4426,
"تت": 4427,
"بح": 4428,
"مكان": 4429,
"كيف": 4430,
"ّة": 4431,
"الا": 4432,
"جَا": 4433,
"أو": 4434,
"ساعد": 4435,
"ضِ": 4436,
"إلا": 4437,
"راً": 4438,
"قَا": 4439,
"رأ": 4440,
"عت": 4441,
"أحد": 4442,
"هد": 4443,
"ضا": 4444,
"طر": 4445,
"أق": 4446,
"ماء": 4447,
"دَّ": 4448,
"البا": 4449,
"مُو": 4450,
"أَوْ": 4451,
"طا": 4452,
"قُو": 4453,
"خِ": 4454,
"تل": 4455,
"ستطيع": 4456,
"دَا": 4457,
"النَّا": 4458,
"إلَى": 4459,
"وَتَ": 4460,
"هَذَا": 4461,
"بة": 4462,
"عليك": 4463,
"جر": 4464,
"المن": 4465,
"زا": 4466,
"رٍ": 4467,
"دع": 4468,
"ًّا": 4469,
"سة": 4470,
"ثُمَّ": 4471,
"شيء": 4472,
"الغ": 4473,
"تح": 4474,
"رُونَ": 4475,
"اليوم": 4476,
"مِي": 4477,
"نُوا": 4478,
"أر": 4479,
"تُمْ": 4480,
"عر": 4481,
"يف": 4482,
"أب": 4483,
"دًا": 4484,
"صَا": 4485,
"التَّ": 4486,
"أريد": 4487,
"الز": 4488,
"يَوْ": 4489,
"إلي": 4490,
"جي": 4491,
"يَعْ": 4492,
"فضل": 4493,
"الإن": 4494,
"أنه": 4495,
"1": 4496,
"2": 4497,
"3": 4498,
"4": 4499,
"5": 4500,
"·": 4501,
"×": 4502,
"̃": 4503,
"̌": 4504,
"ε": 4505,
"λ": 4506,
"μ": 4507,
"•": 4508,
"‧": 4509,
"─": 4510,
"□": 4511,
"、": 4512,
"。": 4513,
"〈": 4514,
"〉": 4515,
"《": 4516,
"》": 4517,
"「": 4518,
"」": 4519,
"『": 4520,
"』": 4521,
"ア": 4522,
"オ": 4523,
"カ": 4524,
"チ": 4525,
"ド": 4526,
"ベ": 4527,
"ャ": 4528,
"ヤ": 4529,
"ン": 4530,
"・": 4531,
"ー": 4532,
"ㄟ": 4533,
"!": 4534,
"(": 4535,
")": 4536,
",": 4537,
"-": 4538,
"/": 4539,
":": 4540,
";": 4541,
"?": 4542,
"p": 4543,
"i4": 4544,
"zh": 4545,
"i2": 4546,
"ng1": 4547,
"u4": 4548,
"i1": 4549,
"ng2": 4550,
"u3": 4551,
"de5": 4552,
"e4": 4553,
"i3": 4554,
"ng4": 4555,
"an4": 4556,
"shi4": 4557,
"an2": 4558,
"u2": 4559,
"u1": 4560,
"ng3": 4561,
"a1": 4562,
"an1": 4563,
"e2": 4564,
"a4": 4565,
"ei4": 4566,
"ong1": 4567,
"ai4": 4568,
"ao4": 4569,
"ang1": 4570,
"an3": 4571,
"wei4": 4572,
"uo2": 4573,
"n1": 4574,
"en2": 4575,
"ao3": 4576,
"e1": 4577,
"qi": 4578,
"eng2": 4579,
"zho": 4580,
"ang3": 4581,
"ang4": 4582,
"ang2": 4583,
"uo4": 4584,
"ge4": 4585,
"yi1": 4586,
"guo2": 4587,
"a3": 4588,
"he2": 4589,
"e3": 4590,
"yi2": 4591,
"di4": 4592,
"zhong1": 4593,
"bu4": 4594,
"ai2": 4595,
"n2": 4596,
"zai4": 4597,
"shi2": 4598,
"eng1": 4599,
"ren2": 4600,
"ong2": 4601,
"xian4": 4602,
"xu": 4603,
"n4": 4604,
"li4": 4605,
"en4": 4606,
"yu2": 4607,
"ei2": 4608,
"yi2ge4": 4609,
"ou4": 4610,
"ei3": 4611,
"ui4": 4612,
"a2": 4613,
"you3": 4614,
"ao1": 4615,
"da4": 4616,
"cheng2": 4617,
"en1": 4618,
"eng4": 4619,
"yi4": 4620,
"si1": 4621,
"zhi4": 4622,
"jia1": 4623,
"yuan2": 4624,
"ta1": 4625,
"de5yi2ge4": 4626,
"ke1": 4627,
"shu3": 4628,
"xi1": 4629,
"ji2": 4630,
"ao2": 4631,
"ou3": 4632,
"ong4": 4633,
"xia4": 4634,
"ai1": 4635,
"gong1": 4636,
"zhi1": 4637,
"en3": 4638,
"wei2": 4639,
"xue2": 4640,
"qu1": 4641,
"zhou1": 4642,
"er3": 4643,
"ming2": 4644,
"zhong3": 4645,
"li3": 4646,
"wu4": 4647,
"yi3": 4648,
"uo1": 4649,
"e5": 4650,
"ji4": 4651,
"xing2": 4652,
"jian4": 4653,
"hua4": 4654,
"yu3": 4655,
"uo3": 4656,
"ji1": 4657,
"ai3": 4658,
"zuo4": 4659,
"hou4": 4660,
"hui4": 4661,
"ei1": 4662,
"nian2": 4663,
"qi2": 4664,
"dao4": 4665,
"sheng1": 4666,
"de2": 4667,
"dai4": 4668,
"uan2": 4669,
"zhe4": 4670,
"zheng4": 4671,
"ben3": 4672,
"shang4": 4673,
"zhu3": 4674,
"bei4": 4675,
"ye4": 4676,
"chu1": 4677,
"zhan4": 4678,
"le5": 4679,
"lai2": 4680,
"shi3": 4681,
"nan2": 4682,
"ren4": 4683,
"you2": 4684,
"ke4": 4685,
"ba1": 4686,
"fu4": 4687,
"dui4": 4688,
"ya4": 4689,
"mei3": 4690,
"zi4": 4691,
"xin1": 4692,
"jing1": 4693,
"zhu": 4694,
"n3": 4695,
"yong4": 4696,
"mu4": 4697,
"jiao4": 4698,
"ye3": 4699,
"jin4": 4700,
"bian4": 4701,
"lu4": 4702,
"qi1": 4703,
"she4": 4704,
"xiang1": 4705,
"ong3": 4706,
"shu4": 4707,
"dong4": 4708,
"suo3": 4709,
"guan1": 4710,
"san1": 4711,
"te4": 4712,
"duo1": 4713,
"fu2": 4714,
"min2": 4715,
"la1": 4716,
"zhi2": 4717,
"zhen4": 4718,
"ou1": 4719,
"wu3": 4720,
"ma3": 4721,
"i5": 4722,
"zi5": 4723,
"ju4": 4724,
"er4": 4725,
"yao4": 4726,
"xia4de5yi2ge4": 4727,
"si4": 4728,
"tu2": 4729,
"shan1": 4730,
"zui4": 4731,
"yin1": 4732,
"er2": 4733,
"tong2": 4734,
"dong1": 4735,
"yu4": 4736,
"yan2": 4737,
"qian2": 4738,
"shu3xia4de5yi2ge4": 4739,
"jun1": 4740,
"ke3": 4741,
"wen2": 4742,
"fa3": 4743,
"luo2": 4744,
"zhu4": 4745,
"xi4": 4746,
"kou3": 4747,
"bei3": 4748,
"jian1": 4749,
"fa1": 4750,
"dian4": 4751,
"jiang1": 4752,
"wei4yu2": 4753,
"xiang4": 4754,
"zhi3": 4755,
"eng3": 4756,
"fang1": 4757,
"lan2": 4758,
"shu": 4759,
"ri4": 4760,
"lian2": 4761,
"shou3": 4762,
"qiu2": 4763,
"jin1": 4764,
"huo4": 4765,
"shu3xia4de5yi2ge4zhong3": 4766,
"fen1": 4767,
"nei4": 4768,
"gai1": 4769,
"mei3guo2": 4770,
"un2": 4771,
"ge2": 4772,
"bao3": 4773,
"qing1": 4774,
"gao1": 4775,
"tai2": 4776,
"xiao3": 4777,
"jie2": 4778,
"tian1": 4779,
"chang2": 4780,
"quan2": 4781,
"lie4": 4782,
"hai3": 4783,
"fei1": 4784,
"ti3": 4785,
"jue2": 4786,
"ou2": 4787,
"ci3": 4788,
"zu2": 4789,
"ni2": 4790,
"biao3": 4791,
"zhong1guo2": 4792,
"du4": 4793,
"yue4": 4794,
"xing4": 4795,
"sheng4": 4796,
"che1": 4797,
"dan1": 4798,
"jie1": 4799,
"lin2": 4800,
"ping2": 4801,
"fu3": 4802,
"gu3": 4803,
"jie4": 4804,
"v3": 4805,
"sheng3": 4806,
"na4": 4807,
"yuan4": 4808,
"zhang3": 4809,
"guan3": 4810,
"dao3": 4811,
"zu3": 4812,
"ding4": 4813,
"dian3": 4814,
"ceng2": 4815,
"ren2kou3": 4816,
"tai4": 4817,
"tong1": 4818,
"guo4": 4819,
"neng2": 4820,
"chang3": 4821,
"hua2": 4822,
"liu2": 4823,
"ying1": 4824,
"xiao4": 4825,
"ci4": 4826,
"bian4hua4": 4827,
"liang3": 4828,
"gong4": 4829,
"zhong4": 4830,
"de5yi1": 4831,
"se4": 4832,
"kai1": 4833,
"wang2": 4834,
"jiu4": 4835,
"shi1": 4836,
"shou4": 4837,
"mei2": 4838,
"feng1": 4839,
"ze2": 4840,
"tu2shi4": 4841,
"ti2": 4842,
"qi4": 4843,
"jiu3": 4844,
"shen1": 4845,
"zhe3": 4846,
"ren2kou3bian4hua4": 4847,
"ren2kou3bian4hua4tu2shi4": 4848,
"di4qu1": 4849,
"yang2": 4850,
"men5": 4851,
"long2": 4852,
"bing4": 4853,
"chan3": 4854,
"zhu1": 4855,
"wei3": 4856,
"wai4": 4857,
"xing1": 4858,
"bo1": 4859,
"bi3": 4860,
"tang2": 4861,
"hua1": 4862,
"bo2": 4863,
"shui3": 4864,
"shu1": 4865,
"dou1": 4866,
"sai4": 4867,
"chao2": 4868,
"bi4": 4869,
"ling2": 4870,
"lei4": 4871,
"da4xue2": 4872,
"fen4": 4873,
"shu3de5": 4874,
"mu3": 4875,
"jiao1": 4876,
"dang1": 4877,
"cheng1": 4878,
"tong3": 4879,
"nv3": 4880,
"qi3": 4881,
"yan3": 4882,
"mian4": 4883,
"luo4": 4884,
"jing4": 4885,
"ge1": 4886,
"ru4": 4887,
"dan4": 4888,
"ri4ben3": 4889,
"pu3": 4890,
"yun4": 4891,
"huang2": 4892,
"wo3": 4893,
"lv": 4894,
"hai2": 4895,
"shi4yi1": 4896,
"xie1": 4897,
"ying3": 4898,
"wu2": 4899,
"shen2": 4900,
"wang3": 4901,
"guang3": 4902,
"liu4": 4903,
"su4": 4904,
"shi4zhen4": 4905,
"can1": 4906,
"cao3": 4907,
"xia2": 4908,
"ka3": 4909,
"da2": 4910,
"hu4": 4911,
"ban4": 4912,
"dang3": 4913,
"hu2": 4914,
"zong3": 4915,
"deng3": 4916,
"de5yi2ge4shi4zhen4": 4917,
"chuan2": 4918,
"mo4": 4919,
"zhang1": 4920,
"ban1": 4921,
"mo2": 4922,
"cha2": 4923,
"ce4": 4924,
"zhu3yao4": 4925,
"tou2": 4926,
"ju2": 4927,
"shi4wei4yu2": 4928,
"sa4": 4929,
"un1": 4930,
"ke3yi3": 4931,
"du1": 4932,
"han4": 4933,
"liang4": 4934,
"sha1": 4935,
"jia3": 4936,
"zi1": 4937,
"lv4": 4938,
"fu1": 4939,
"xian1": 4940,
"xu4": 4941,
"guang1": 4942,
"meng2": 4943,
"bao4": 4944,
"you4": 4945,
"rong2": 4946,
"zhi1yi1": 4947,
"wei1": 4948,
"mao2": 4949,
"guo2jia1": 4950,
"cong2": 4951,
"gou4": 4952,
"tie3": 4953,
"zhen1": 4954,
"du2": 4955,
"bian1": 4956,
"ci2": 4957,
"qu3": 4958,
"fan4": 4959,
"xiang3": 4960,
"men2": 4961,
"ju1": 4962,
"hong2": 4963,
"zi3": 4964,
"ta1men5": 4965,
"ji3": 4966,
"zong1": 4967,
"zhou1de5yi2ge4shi4zhen4": 4968,
"tuan2": 4969,
"jing3": 4970,
"gong1si1": 4971,
"xie4": 4972,
"li2": 4973,
"li4shi3": 4974,
"bao1": 4975,
"gang3": 4976,
"gui1": 4977,
"zheng1": 4978,
"zhi2wu4": 4979,
"ta1de5": 4980,
"pin3": 4981,
"zhuan1": 4982,
"chong2": 4983,
"shi3yong4": 4984,
"wa3": 4985,
"shuo1": 4986,
"chuan1": 4987,
"lei2": 4988,
"wan1": 4989,
"huo2": 4990,
"su1": 4991,
"zao3": 4992,
"gai3": 4993,
"qu4": 4994,
"gu4": 4995,
"xi2": 4996,
"hang2": 4997,
"ying4": 4998,
"cun1": 4999,
"gen1": 5000,
"ying2": 5001,
"ting2": 5002,
"cheng2shi4": 5003,
"jiang3": 5004,
"ling3": 5005,
"lun2": 5006,
"bu4fen4": 5007,
"deng1": 5008,
"xuan3": 5009,
"dong4wu4": 5010,
"de2guo2": 5011,
"xian3": 5012,
"fan3": 5013,
"zhe5": 5014,
"han2": 5015,
"hao4": 5016,
"mi4": 5017,
"ran2": 5018,
"qin1": 5019,
"tiao2": 5020,
"zhan3": 5021,
"[ar]": 5022,
"[zh-cn]": 5023,
"shi": 5026,
"tsu": 5027,
"teki": 5028,
"nai": 5029,
"aru": 5030,
"uu": 5031,
"kai": 5032,
"shite": 5033,
"mono": 5034,
"koto": 5035,
"kara": 5036,
"shita": 5037,
"suru": 5038,
"masu": 5039,
"tai": 5040,
"ware": 5041,
"shin": 5042,
"oku": 5043,
"yuu": 5044,
"iru": 5045,
"jiko": 5046,
"desu": 5047,
"rare": 5048,
"shou": 5049,
"sha": 5050,
"sekai": 5051,
"kyou": 5052,
"mashita": 5053,
"nara": 5054,
"kei": 5055,
"ita": 5056,
"ari": 5057,
"itsu": 5058,
"kono": 5059,
"naka": 5060,
"chou": 5061,
"sore": 5062,
"naru": 5063,
"gaku": 5064,
"reba": 5065,
"hito": 5066,
"sai": 5067,
"nan": 5068,
"dai": 5069,
"tsuku": 5070,
"shiki": 5071,
"sare": 5072,
"naku": 5073,
"jun": 5074,
"kaku": 5075,
"zai": 5076,
"wata": 5077,
"shuu": 5078,
"ii": 5079,
"kare": 5080,
"shii": 5081,
"made": 5082,
"sho": 5083,
"kereba": 5084,
"shika": 5085,
"ichi": 5086,
"deki": 5087,
"nin": 5088,
"wareware": 5089,
"nakereba": 5090,
"oite": 5091,
"yaku": 5092,
"mujun": 5093,
"yoku": 5094,
"butsu": 5095,
"omo": 5096,
"gae": 5097,
"naranai": 5098,
"tachi": 5099,
"chuu": 5100,
"kangae": 5101,
"toki": 5102,
"koro": 5103,
"mujunteki": 5104,
"naga": 5105,
"jin": 5106,
"shima": 5107,
"iku": 5108,
"imasu": 5109,
"hon": 5110,
"kae": 5111,
"kore": 5112,
"kita": 5113,
"datta": 5114,
"jitsu": 5115,
"mae": 5116,
"toku": 5117,
"douitsu": 5118,
"ritsu": 5119,
"kyuu": 5120,
"hyou": 5121,
"rareta": 5122,
"keisei": 5123,
"kkan": 5124,
"rareru": 5125,
"mou": 5126,
"doko": 5127,
"ryou": 5128,
"dake": 5129,
"nakatta": 5130,
"soko": 5131,
"tabe": 5132,
"hana": 5133,
"fuku": 5134,
"yasu": 5135,
"wataku": 5136,
"yama": 5137,
"kyo": 5138,
"genzai": 5139,
"boku": 5140,
"ata": 5141,
"kawa": 5142,
"masen": 5143,
"juu": 5144,
"natte": 5145,
"watakushi": 5146,
"yotte": 5147,
"hai": 5148,
"jishin": 5149,
"rete": 5150,
"oka": 5151,
"kagaku": 5152,
"natta": 5153,
"karu": 5154,
"nari": 5155,
"mata": 5156,
"kuru": 5157,
"gai": 5158,
"kari": 5159,
"shakai": 5160,
"koui": 5161,
"yori": 5162,
"setsu": 5163,
"reru": 5164,
"tokoro": 5165,
"jutsu": 5166,
"saku": 5167,
"ttai": 5168,
"ningen": 5169,
"tame": 5170,
"kankyou": 5171,
"ooku": 5172,
"watashi": 5173,
"tsukuru": 5174,
"sugi": 5175,
"jibun": 5176,
"shitsu": 5177,
"keru": 5178,
"kishi": 5179,
"shikashi": 5180,
"moto": 5181,
"mari": 5182,
"itte": 5183,
"deshita": 5184,
"nde": 5185,
"arimasu": 5186,
"koe": 5187,
"zettai": 5188,
"kkanteki": 5189,
"rekishi": 5190,
"dekiru": 5191,
"tsuka": 5192,
"itta": 5193,
"kobutsu": 5194,
"miru": 5195,
"shoku": 5196,
"shimasu": 5197,
"gijutsu": 5198,
"gyou": 5199,
"joushiki": 5200,
"atta": 5201,
"hodo": 5202,
"koko": 5203,
"tsukurareta": 5204,
"zoku": 5205,
"hitei": 5206,
"koku": 5207,
"rekishiteki": 5208,
"kete": 5209,
"kako": 5210,
"nagara": 5211,
"kakaru": 5212,
"shutai": 5213,
"haji": 5214,
"taku": 5215,
"douitsuteki": 5216,
"mete": 5217,
"tsuu": 5218,
"sarete": 5219,
"genjitsu": 5220,
"bai": 5221,
"nawa": 5222,
"jikan": 5223,
"waru": 5224,
"rt": 5225,
"atsu": 5226,
"soku": 5227,
"kouiteki": 5228,
"kata": 5229,
"tetsu": 5230,
"gawa": 5231,
"kedo": 5232,
"reta": 5233,
"sayou": 5234,
"tteru": 5235,
"tori": 5236,
"kimi": 5237,
"mura": 5238,
"sareru": 5239,
"machi": 5240,
"kya": 5241,
"osa": 5242,
"konna": 5243,
"aku": 5244,
"sareta": 5245,
"ipp": 5246,
"shiku": 5247,
"uchi": 5248,
"hitotsu": 5249,
"hatara": 5250,
"tachiba": 5251,
"shiro": 5252,
"katachi": 5253,
"tomo": 5254,
"ete": 5255,
"meru": 5256,
"nichi": 5257,
"dare": 5258,
"katta": 5259,
"eru": 5260,
"suki": 5261,
"ooki": 5262,
"maru": 5263,
"moku": 5264,
"oko": 5265,
"kangaerareru": 5266,
"oto": 5267,
"tanni": 5268,
"tada": 5269,
"taiteki": 5270,
"motte": 5271,
"kinou": 5272,
"shinai": 5273,
"kki": 5274,
"tari": 5275,
"ranai": 5276,
"kkou": 5277,
"mirai": 5278,
"ppon": 5279,
"goto": 5280,
"hitsu": 5281,
"teru": 5282,
"mochi": 5283,
"katsu": 5284,
"nyuu": 5285,
"zuka": 5286,
"tsuite": 5287,
"nomi": 5288,
"sugu": 5289,
"kuda": 5290,
"tetsugaku": 5291,
"ika": 5292,
"ronri": 5293,
"oki": 5294,
"nippon": 5295,
"shimashita": 5296,
"chishiki": 5297,
"chokkanteki": 5298,
"suko": 5299,
"kuu": 5300,
"arou": 5301,
"katte": 5302,
"kuri": 5303,
"inai": 5304,
"hyougen": 5305,
"ishiki": 5306,
"doku": 5307,
"atte": 5308,
"atara": 5309,
"wari": 5310,
"kao": 5311,
"seisan": 5312,
"hanashi": 5313,
"kake": 5314,
"naji": 5315,
"sunawa": 5316,
"sunawachi": 5317,
"ugo": 5318,
"suu": 5319,
"bara": 5320,
"hiro": 5321,
"iwa": 5322,
"betsu": 5323,
"yoi": 5324,
"seru": 5325,
"shiteru": 5326,
"rarete": 5327,
"toshi": 5328,
"seki": 5329,
"tairitsu": 5330,
"wakara": 5331,
"tokyo": 5332,
"kka": 5333,
"kyoku": 5334,
"iro": 5335,
"mite": 5336,
"saki": 5337,
"kanji": 5338,
"mita": 5339,
"sube": 5340,
"ryoku": 5341,
"matta": 5342,
"kudasai": 5343,
"omoi": 5344,
"wareru": 5345,
"hitsuyou": 5346,
"kashi": 5347,
"renai": 5348,
"kankei": 5349,
"gatte": 5350,
"ochi": 5351,
"motsu": 5352,
"sonzai": 5353,
"taishite": 5354,
"ame": 5355,
"seimei": 5356,
"kano": 5357,
"giri": 5358,
"kangaeru": 5359,
"yue": 5360,
"asa": 5361,
"onaji": 5362,
"yoru": 5363,
"niku": 5364,
"osaka": 5365,
"sukoshi": 5366,
"tama": 5367,
"kanojo": 5368,
"kite": 5369,
"mondai": 5370,
"amari": 5371,
"eki": 5372,
"kojin": 5373,
"haya": 5374,
"dete": 5375,
"atarashii": 5376,
"awa": 5377,
"gakkou": 5378,
"tsuzu": 5379,
"shukan": 5380,
"imashita": 5381,
"atae": 5382,
"darou": 5383,
"hataraku": 5384,
"gata": 5385,
"dachi": 5386,
"matsu": 5387,
"arimasen": 5388,
"seibutsu": 5389,
"mitsu": 5390,
"heya": 5391,
"yasui": 5392,
"deni": 5393,
"noko": 5394,
"haha": 5395,
"domo": 5396,
"kami": 5397,
"sudeni": 5398,
"nao": 5399,
"raku": 5400,
"ike": 5401,
"meta": 5402,
"kodomo": 5403,
"soshite": 5404,
"game": 5405,
"bakari": 5406,
"tote": 5407,
"hatsu": 5408,
"mise": 5409,
"mokuteki": 5410,
"dakara": 5411,
"[ja]": 5412
},
"merges": [
"t h",
"i n",
"th e",
"a n",
"e r",
"o u",
"r e",
"o n",
"a t",
"e d",
"e n",
"t o",
"in g",
"an d",
"i s",
"a s",
"a l",
"o r",
"o f",
"a r",
"i t",
"e s",
"h e",
"s t",
"l e",
"o m",
"s e",
"b e",
"a d",
"o w",
"l y",
"c h",
"w h",
"th at",
"y ou",
"l i",
"v e",
"a c",
"t i",
"l d",
"m e",
"w as",
"g h",
"i d",
"l l",
"w i",
"en t",
"f or",
"a y",
"r o",
"v er",
"i c",
"h er",
"k e",
"h is",
"n o",
"u t",
"u n",
"i r",
"l o",
"w e",
"r i",
"h a",
"wi th",
"gh t",
"ou t",
"i m",
"i on",
"al l",
"a b",
"on e",
"n e",
"g e",
"ou ld",
"t er",
"m o",
"h ad",
"c e",
"s he",
"g o",
"s h",
"u r",
"a m",
"s o",
"p e",
"m y",
"d e",
"a re",
"b ut",
"om e",
"f r",
"the r",
"f e",
"s u",
"d o",
"c on",
"t e",
"a in",
"er e",
"p o",
"i f",
"the y",
"u s",
"a g",
"t r",
"n ow",
"ou n",
"th is",
"ha ve",
"no t",
"s a",
"i l",
"u p",
"th ing",
"fr om",
"a p",
"h im",
"ac k",
"at ion",
"an t",
"ou r",
"o p",
"li ke",
"u st",
"es s",
"b o",
"o k",
"u l",
"in d",
"e x",
"c om",
"s ome",
"the re",
"er s",
"c o",
"re s",
"m an",
"ar d",
"p l",
"w or",
"w ay",
"ti on",
"f o",
"c a",
"w ere",
"b y",
"at e",
"p ro",
"t ed",
"oun d",
"ow n",
"w ould",
"t s",
"wh at",
"q u",
"al ly",
"i ght",
"c k",
"g r",
"wh en",
"v en",
"c an",
"ou gh",
"in e",
"en d",
"p er",
"ou s",
"o d",
"id e",
"k now",
"t y",
"ver y",
"s i",
"a k",
"wh o",
"ab out",
"i ll",
"the m",
"es t",
"re d",
"y e",
"c ould",
"on g",
"you r",
"the ir",
"e m",
"j ust",
"o ther",
"in to",
"an y",
"wh i",
"u m",
"t w",
"as t",
"d er",
"d id",
"i e",
"be en",
"ac e",
"in k",
"it y",
"b ack",
"t ing",
"b r",
"mo re",
"a ke",
"p p",
"the n",
"s p",
"e l",
"u se",
"b l",
"sa id",
"o ver",
"ge t",
"e n",
"e r",
"c h",
"e i",
"i e",
"u n",
"i ch",
"ei n",
"s t",
"a n",
"t e",
"g e",
"a u",
"i n",
"s ch",
"d er",
"un d",
"d ie",
"d a",
"e s",
"a l",
"d en",
"a r",
"g en",
"z u",
"d e",
"h r",
"o n",
"t en",
"e l",
"o r",
"m i",
"s ie",
"da s",
"a t",
"b e",
"ein e",
"ich t",
"b er",
"l e",
"a ch",
"v er",
"s e",
"au f",
"w i",
"s o",
"t er",
"l ich",
"c k",
"u r",
"n icht",
"m m",
"b en",
"a s",
"w ar",
"r e",
"mi t",
"s ich",
"i g",
"l l",
"au s",
"i st",
"w ie",
"o ch",
"un g",
"an n",
"ü r",
"h n",
"i hr",
"s a",
"s en",
"t z",
"de m",
"ei t",
"u m",
"h at",
"wi r",
"v on",
"h a",
"s p",
"w ei",
"i er",
"r o",
"h er",
"r a",
"ein en",
"n e",
"v or",
"al s",
"an d",
"al l",
"w as",
"w o",
"r ei",
"st e",
"l ie",
"au ch",
"d u",
"d es",
"k o",
"ü ber",
"a m",
"b ei",
"h en",
"h m",
"l ei",
"a ber",
"w en",
"h l",
"g er",
"i m",
"u t",
"n ach",
"h e",
"i s",
"b r",
"f t",
"en t",
"i mm",
"j e",
"sch en",
"w er",
"s er",
"a b",
"ä n",
"m e",
"s ein",
"i t",
"o l",
"ch t",
"f ür",
"k l",
"f f",
"eine m",
"n en",
"w e",
"j a",
"u s",
"n och",
"hat te",
"t r",
"p f",
"h in",
"d i",
"ch en",
"b l",
"m an",
"r ü",
"ie l",
"s el",
"das s",
"i hn",
"mi r",
"sch l",
"ö n",
"g an",
"g t",
"ein er",
"st en",
"m ich",
"wen n",
"el l",
"g te",
"in d",
"m al",
"ge l",
"k en",
"n ur",
"mm en",
"f ü",
"er n",
"ö r",
"un ter",
"f r",
"an der",
"g r",
"i l",
"d ur",
"u ch",
"f e",
"t a",
"m en",
"m ach",
"d och",
"t i",
"dur ch",
"o s",
"g l",
"h al",
"ihr e",
"w ä",
"imm er",
"i hm",
"k ann",
"or t",
"d ann",
"l an",
"tz t",
"o der",
"hr en",
"e t",
"k ön",
"i ck",
"f a",
"in g",
"i r",
"wie der",
"da ß",
"m ein",
"f en",
"gan z",
"die se",
"st er",
"da r",
"w a",
"ge s",
"n a",
"f l",
"i gen",
"sch e",
"un gen",
"me hr",
"ß en",
"o t",
"k on",
"ge w",
"ha ben",
"ge h",
"ä t",
"s ind",
"d r",
"w el",
"un s",
"v o",
"m a",
"u te",
"sch on",
"b es",
"ge sch",
"b t",
"ch e",
"s on",
"o b",
"l a",
"p p",
"rü ck",
"s eine",
"k r",
"f re",
"ei l",
"zu m",
"u l",
"h ier",
"k t",
"i ge",
"sp r",
"k e",
"le ben",
"b st",
"z eit",
"i on",
"g ro",
"den n",
"h o",
"sch a",
"b ar",
"al le",
"ge gen",
"w ür",
"m ü",
"z e",
"wer den",
"je tzt",
"ko mmen",
"n ie",
"s ei",
"h eit",
"so ll",
"g lei",
"m eine",
"wo ll",
"n er",
"ha be",
"w ur",
"lich en",
"p er",
"as sen",
"n te",
"se hen",
"wir d",
"b is",
"g ar",
"i en",
"m us",
"u ß",
"ä r",
"st ell",
"k eit",
"z wei",
"sel bst",
"st a",
"p a",
"sa gte",
"te t",
"k am",
"s sen",
"v iel",
"u g",
"z en",
"h ei",
"m ann",
"wi ll",
"ge b",
"war en",
"ü ck",
"ä ch",
"m er",
"r u",
"w or",
"h au",
"ei gen",
"an g",
"we g",
"bl ick",
"f ra",
"all es",
"k a",
"au gen",
"f in",
"lich e",
"t o",
"un ser",
"der n",
"her r",
"n un",
"v ie",
"ch te",
"wo hl",
"f all",
"h t",
"ü n",
"et was",
"st and",
"en d",
"ä u",
"e m",
"m ö",
"te l",
"r ie",
"d ich",
"die s",
"h and",
"b in",
"ff en",
"nicht s",
"d an",
"p l",
"hn e",
"ihn en",
"es en",
"die ser",
"fr au",
"an t",
"ar t",
"di r",
"i sch",
"er st",
"glei ch",
"ko mm",
"h ör",
"ß e",
"d ig",
"se hr",
"z ei",
"sa m",
"au m",
"h ät",
"in gen",
"g ut",
"b o",
"m ut",
"ck en",
"kon nte",
"st imm",
"p ro",
"zu r",
"i tz",
"wei l",
"wür de",
"f ä",
"kön nen",
"k eine",
"f er",
"i schen",
"vo ll",
"ein es",
"se tz",
"z ie",
"de l",
"te te",
"sein er",
"ier en",
"ge st",
"zu rück",
"wur de",
"sch n",
"p r",
"lie ß",
"t ra",
"m ä",
"gen d",
"f ol",
"i k",
"schl a",
"scha ft",
"at er",
"wei ß",
"s einen",
"l assen",
"l u",
"und en",
"t eil",
"ne u",
"ier t",
"men schen",
"hm en",
"st r",
"g i",
"sa h",
"ihr en",
"el n",
"wei ter",
"ge hen",
"ig er",
"mach t",
"ta g",
"al so",
"hal ten",
"n is",
"ach t",
"ge ben",
"f or",
"o g",
"n at",
"m ar",
"de t",
"o hne",
"h aus",
"t ro",
"an ge",
"l au",
"sp iel",
"t re",
"sch r",
"in n",
"s u",
"l os",
"mach en",
"hät te",
"be g",
"wir k",
"al t",
"g lich",
"te s",
"r icht",
"fre und",
"m o",
"ihr er",
"f el",
"b el",
"so l",
"ein mal",
"e ben",
"h ol",
"h än",
"q u",
"ter n",
"h ö",
"sch w",
"re cht",
"wa hr",
"s einem",
"ste hen",
"hl en",
"in s",
"g ing",
"woll te",
"wi ssen",
"ung s",
"al d",
"as s",
"ja hr",
"m or",
"wel t",
"un der",
"zu sa",
"at ion",
"ko pf",
"lan g",
"hin ter",
"at z",
"st ra",
"an gen",
"an k",
"a de",
"gl au",
"f ach",
"hat ten",
"l o",
"f ort",
"ei cht",
"i ff",
"l er",
"m ei",
"diese m",
"k ein",
"f rei",
"fü hr",
"vo m",
"e s",
"e n",
"a i",
"o u",
"o n",
"l e",
"d e",
"r e",
"q u",
"a n",
"e r",
"en t",
"e t",
"l a",
"n e",
"i l",
"a r",
"i s",
"ai t",
"t e",
"a u",
"i n",
"qu e",
"i t",
"u r",
"s e",
"l es",
"c h",
"c e",
"m e",
"o r",
"ou r",
"a s",
"p r",
"a v",
"o m",
"ai s",
"u n",
"an t",
"ou s",
"t r",
"t i",
"l u",
"o i",
"e u",
"l le",
"s i",
"p ar",
"d es",
"an s",
"m ent",
"é t",
"es t",
"j e",
"u ne",
"a l",
"p as",
"t re",
"qu i",
"d u",
"r i",
"c on",
"s on",
"c om",
"e lle",
"d é",
"p our",
"d ans",
"l i",
"s a",
"r é",
"t ou",
"v ous",
"d i",
"v i",
"a g",
"a m",
"a t",
"ou v",
"a p",
"ti on",
"m on",
"s ur",
"c i",
"o s",
"p lu",
"s u",
"en d",
"a b",
"è re",
"ai n",
"m ais",
"o is",
"r es",
"plu s",
"é e",
"ai ent",
"m p",
"ch e",
"lu i",
"av e",
"ét ait",
"m a",
"s es",
"tou t",
"i r",
"v o",
"a c",
"s er",
"an d",
"f f",
"oi r",
"g r",
"av ait",
"é s",
"m es",
"n ous",
"eu x",
"b i",
"t er",
"c o",
"on s",
"p u",
"c es",
"g e",
"t u",
"le ur",
"pr o",
"d on",
"e ur",
"et te",
"ai re",
"ave c",
"d it",
"t é",
"i e",
"u s",
"il le",
"p er",
"com me",
"c r",
"or t",
"m i",
"e x",
"u x",
"v er",
"m o",
"è s",
"v e",
"au x",
"r a",
"j our",
"il s",
"bi en",
"c ou",
"p e",
"que l",
"p eu",
"c ette",
"t es",
"p o",
"in s",
"c u",
"m ê",
"s o",
"f ait",
"g u",
"m ar",
"ê tre",
"l o",
"it é",
"f r",
"a tion",
"en s",
"b r",
"n i",
"l é",
"d is",
"b le",
"m an",
"n é",
"pu is",
"mê me",
"qu es",
"f i",
"e l",
"ag e",
"g ar",
"m oi",
"en ce",
"on t",
"m ain",
"or s",
"au t",
"an ce",
"v en",
"m é",
"s ans",
"e m",
"s é",
"l on",
"h om",
"r o",
"u t",
"c ar",
"ab le",
"i m",
"de r",
"ch er",
"n o",
"vi e",
"au s",
"b e",
"de ux",
"en f",
"o ù",
"t en",
"p h",
"u re",
"te mp",
"p os",
"r ent",
"p é",
"f aire",
"p i",
"tr es",
"ç a",
"an g",
"end re",
"f or",
"p a",
"b on",
"s ou",
"in t",
"pr é",
"s ent",
"t ant",
"n er",
"c er",
"l à",
"l ais",
"pr ès",
"b re",
"c our",
"p et",
"i on",
"i ne",
"com p",
"l ait",
"tr ouv",
"t a",
"ent re",
"son t",
"de v",
"n u",
"temp s",
"d ou",
"r ait",
"b ou",
"qu and",
"jour s",
"l an",
"er s",
"av oir",
"ét é",
"a le",
"p re",
"f ois",
"or te",
"v é",
"m er",
"n on",
"t ous",
"j us",
"cou p",
"t s",
"hom me",
"ê te",
"a d",
"aus si",
"ur s",
"se u",
"or d",
"o b",
"m in",
"g é",
"co re",
"v a",
"v re",
"en core",
"se m",
"i te",
"au tre",
"pr is",
"peu t",
"u e",
"an te",
"m al",
"g n",
"ré p",
"h u",
"si on",
"vo tre",
"di re",
"e z",
"f em",
"leur s",
"m et",
"f in",
"c ri",
"m is",
"t our",
"r ai",
"j am",
"re gar",
"ri en",
"ver s",
"su is",
"p ouv",
"o p",
"v is",
"gr and",
"ant s",
"c or",
"re r",
"ar d",
"c é",
"t ent",
"pr es",
"v ou",
"f a",
"al ors",
"si eur",
"ai ne",
"le r",
"qu oi",
"f on",
"end ant",
"ar ri",
"eu re",
"a près",
"don c",
"it u",
"l è",
"s ait",
"t oi",
"ch a",
"ai l",
"as se",
"i mp",
"vo y",
"con n",
"p la",
"pet it",
"av ant",
"n om",
"t in",
"don t",
"d a",
"s ous",
"e mp",
"per son",
"el les",
"be au",
"par ti",
"ch o",
"pr it",
"tou jours",
"m en",
"r ais",
"jam ais",
"tr av",
"tion s",
"tr ès",
"v oi",
"r en",
"y eux",
"f er",
"v oir",
"pre mi",
"c a",
"g ne",
"h eure",
"r ou",
"e ff",
"no tre",
"ment s",
"t on",
"f ais",
"ce la",
"i er",
"rép on",
"con s",
"ai r",
"ô t",
"p endant",
"i ci",
"tou te",
"j et",
"p ort",
"ét aient",
"p en",
"h é",
"au tres",
"p ère",
"o c",
"quel ques",
"i que",
"l is",
"fem me",
"j ou",
"te ur",
"mon de",
"u se",
"n es",
"d re",
"a ff",
"r ap",
"par t",
"le ment",
"c la",
"f ut",
"quel que",
"pr endre",
"r ê",
"ai lle",
"s ais",
"ch es",
"le t",
"ch ar",
"è res",
"ent s",
"b er",
"g er",
"mo ins",
"e au",
"a î",
"j eu",
"h eur",
"é es",
"tr i",
"po int",
"m om",
"v ent",
"n ouv",
"gr an",
"tr ois",
"s ant",
"tout es",
"con tre",
"è rent",
"che z",
"ave z",
"û t",
"a lle",
"at t",
"p au",
"p orte",
"ouv er",
"b ar",
"l it",
"f ort",
"o t",
"as s",
"pr és",
"cho se",
"v it",
"mon sieur",
"h ab",
"t ête",
"j u",
"te ment",
"c tion",
"v rai",
"la r",
"c et",
"regar d",
"l ant",
"de m",
"s om",
"mom ent",
"il les",
"p le",
"p s",
"b es",
"m ère",
"c l",
"s our",
"y s",
"tr op",
"en ne",
"jus qu",
"av aient",
"av ais",
"jeu ne",
"de puis",
"person ne",
"f it",
"cer t",
"j o",
"g es",
"ou i",
"r est",
"sem b",
"c ap",
"m at",
"m u",
"lon g",
"fr an",
"f aut",
"it i",
"b li",
"che v",
"pr i",
"ent e",
"ain si",
"ch am",
"l ors",
"c as",
"d o",
"il i",
"b é",
"n os",
"an ge",
"su i",
"r it",
"cr o",
"gu e",
"d e",
"e n",
"e s",
"o s",
"l a",
"e r",
"q u",
"a r",
"a n",
"o n",
"qu e",
"a s",
"o r",
"e l",
"d o",
"a l",
"c i",
"u n",
"r e",
"a b",
"i n",
"t e",
"t o",
"s e",
"d i",
"t r",
"d a",
"c on",
"t a",
"s u",
"m i",
"c o",
"t i",
"l e",
"l os",
"n o",
"l o",
"í a",
"c u",
"c a",
"s i",
"v i",
"m e",
"p or",
"m o",
"p ar",
"r a",
"r i",
"la s",
"c h",
"r o",
"m a",
"p er",
"ó n",
"m en",
"de s",
"un a",
"m p",
"s o",
"ab a",
"p u",
"d os",
"t u",
"g u",
"er a",
"de l",
"h a",
"m u",
"l i",
"en t",
"m b",
"h ab",
"es t",
"g o",
"p a",
"r es",
"par a",
"p o",
"á s",
"m os",
"tr a",
"t en",
"an do",
"p i",
"qu i",
"b i",
"m an",
"co mo",
"v e",
"m ás",
"j o",
"ci ón",
"i s",
"t an",
"v o",
"da d",
"c e",
"a do",
"v er",
"f u",
"ci a",
"c er",
"p e",
"c as",
"c ar",
"men te",
"n i",
"su s",
"t ar",
"n a",
"f i",
"t er",
"z a",
"p ro",
"tr o",
"s a",
"l u",
"b a",
"per o",
"s er",
"c es",
"d as",
"d u",
"s in",
"e mp",
"m ar",
"l la",
"e x",
"á n",
"c or",
"i a",
"v a",
"r an",
"ch o",
"g a",
"y o",
"t os",
"c os",
"mi s",
"l es",
"t es",
"v en",
"h o",
"y a",
"en te",
"on es",
"hab ía",
"n u",
"u s",
"p as",
"h i",
"n os",
"es ta",
"la n",
"m as",
"t or",
"l le",
"h e",
"s on",
"b re",
"p re",
"ab an",
"d or",
"í an",
"i r",
"t as",
"é n",
"r u",
"en do",
"a que",
"er o",
"i o",
"qu é",
"m in",
"c ab",
"j a",
"de r",
"t al",
"é s",
"se ñ",
"or a",
"to do",
"la r",
"d on",
"g ar",
"s al",
"p r",
"cu ando",
"j e",
"h u",
"g un",
"b u",
"g i",
"d ar",
"n e",
"r as",
"de n",
"es to",
"par e",
"p en",
"é l",
"tr as",
"c an",
"b o",
"j os",
"mi en",
"pu e",
"c re",
"co mp",
"p on",
"d ía",
"tr os",
"s ab",
"so bre",
"es e",
"mb re",
"er on",
"a ñ",
"m or",
"f or",
"i do",
"por que",
"el la",
"p ri",
"g ran",
"f a",
"c en",
"di s",
"c ri",
"mu y",
"ch a",
"c al",
"es te",
"h as",
"c ó",
"g ra",
"r os",
"p os",
"o b",
"al l",
"aque l",
"j u",
"p res",
"m er",
"di jo",
"c ía",
"ent re",
"z o",
"ci ones",
"bi en",
"mb i",
"el o",
"t ó",
"in a",
"to dos",
"g en",
"ti en",
"est aba",
"de ci",
"ci o",
"h er",
"ñ o",
"l or",
"nu es",
"me di",
"l en",
"vi da",
"f e",
"al i",
"m on",
"c la",
"d re",
"pu es",
"al es",
"vo l",
"m í",
"r ar",
"b le",
"ci on",
"has ta",
"señ or",
"con o",
"a h",
"di os",
"s en",
"es a",
"ú n",
"v ar",
"s an",
"gu i",
"a c",
"o tros",
"ta do",
"bu en",
"ñ a",
"ti emp",
"ha cer",
"j er",
"f er",
"v u",
"f in",
"an a",
"as í",
"an tes",
"t in",
"ve z",
"mien to",
"j ar",
"la b",
"ch e",
"cas a",
"d r",
"es o",
"e go",
"di ó",
"an te",
"est á",
"m al",
"en cia",
"el i",
"í as",
"tiemp o",
"z ar",
"v an",
"m un",
"er ta",
"ta mbi",
"s í",
"b ar",
"a un",
"al e",
"mis mo",
"ent es",
"vi s",
"man o",
"el e",
"na da",
"se gu",
"me j",
"er ra",
"ab le",
"b e",
"ti r",
"un o",
"don de",
"to da",
"des de",
"r en",
"tambi én",
"cu er",
"per son",
"ho mbre",
"o tro",
"li b",
"tr ar",
"cu al",
"ha y",
"a u",
"ca da",
"t aba",
"i mp",
"men to",
"ten ía",
"qu er",
"er an",
"si emp",
"siemp re",
"er to",
"qu í",
"g os",
"pu és",
"el los",
"des pués",
"nu e",
"g an",
"l lo",
"in ter",
"có mo",
"tr i",
"ah ora",
"us te",
"tr aba",
"la do",
"in o",
"po co",
"er te",
"mu jer",
"i m",
"qui er",
"al gun",
"fu e",
"o jos",
"ent on",
"v os",
"es per",
"mu ch",
"o tra",
"a z",
"a d",
"in g",
"e za",
"a quí",
"ci as",
"gu a",
"mu cho",
"deci r",
"es ti",
"i dad",
"al go",
"e z",
"o cu",
"enton ces",
"di do",
"ent os",
"g ri",
"da do",
"i os",
"so l",
"dos e",
"uste d",
"qui en",
"a mi",
"un to",
"f r",
"mi r",
"mej or",
"b as",
"so lo",
"pre gun",
"tu r",
"al g",
"p la",
"to das",
"par te",
"e mb",
"c to",
"mun do",
"tien e",
"tan te",
"pa lab",
"tr an",
"aque lla",
"ci os",
"aun que",
"a y",
"cu en",
"ten er",
"f un",
"res pon",
"all í",
"x i",
"h an",
"pen s",
"con tra",
"tu ra",
"v al",
"di o",
"tr es",
"t re",
"tan to",
"ca min",
"m ó",
"es p",
"a da",
"í o",
"in s",
"ha cia",
"de j",
"est ar",
"i ón",
"g as",
"b er",
"v as",
"no che",
"é r",
"añ os",
"pa dre",
"gu s",
"á r",
"sin o",
"man os",
"ci do",
"es tu",
"a de",
"hu bi",
"vi r",
"b ri",
"ra z",
"ch i",
"pue de",
"men os",
"hab i",
"ho mb",
"ne ces",
"ma y",
"er os",
"r ía",
"he cho",
"es cu",
"l ti",
"án do",
"b us",
"cos as",
"t ú",
"es pa",
"re ci",
"c tor",
"pri m",
"di a",
"de se",
"mien tras",
"h or",
"fu er",
"i da",
"pos i",
"lan te",
"t on",
"an o",
"est as",
"p li",
"ch ar",
"lu ego",
"si ón",
"ci n",
"ti erra",
"m es",
"gu ar",
"ca do",
"en con",
"pr en",
"may or",
"f al",
"e r",
"o n",
"a n",
"t o",
"d i",
"r e",
"l a",
"i n",
"e n",
"a l",
"t a",
"c h",
"e l",
"r i",
"c o",
"t i",
"t e",
"s i",
"r a",
"u n",
"l e",
"l i",
"ch e",
"r o",
"c i",
"c a",
"s e",
"q u",
"m a",
"p o",
"s o",
"i l",
"d o",
"e s",
"v a",
"p er",
"l o",
"c on",
"d el",
"p a",
"m o",
"s a",
"p i",
"d a",
"m i",
"g i",
"s u",
"d e",
"v i",
"z i",
"m e",
"g li",
"n o",
"m en",
"v o",
"t u",
"n on",
"v e",
"t to",
"s t",
"on e",
"an o",
"ch i",
"er a",
"er e",
"f a",
"c e",
"z a",
"un a",
"b i",
"p re",
"s ta",
"o r",
"a r",
"f i",
"on o",
"t ra",
"n a",
"n el",
"n e",
"p ro",
"t ro",
"al e",
"v er",
"n i",
"c u",
"t ti",
"men te",
"del la",
"t er",
"zi one",
"g u",
"p e",
"t ta",
"an do",
"t à",
"al i",
"u o",
"qu el",
"co m",
"s en",
"co me",
"b a",
"al la",
"p ri",
"d u",
"qu es",
"l u",
"on i",
"g gi",
"pa r",
"s si",
"v en",
"in a",
"g a",
"pi ù",
"ci a",
"i m",
"co r",
"m an",
"in o",
"in i",
"t en",
"r an",
"b b",
"g o",
"s to",
"t re",
"a ve",
"a v",
"s ono",
"er i",
"a c",
"s se",
"er o",
"h a",
"s c",
"su l",
"f or",
"v ano",
"po r",
"s ti",
"su o",
"c chi",
"t an",
"z za",
"an che",
"p u",
"i o",
"t te",
"vo l",
"es s",
"s ci",
"co l",
"r u",
"p en",
"f u",
"al l",
"s so",
"s te",
"se m",
"s sa",
"d en",
"a d",
"t ri",
"de i",
"in e",
"ave va",
"men to",
"z z",
"a mo",
"g no",
"f o",
"un o",
"su a",
"g en",
"ri a",
"g e",
"st ra",
"s ì",
"c er",
"ch é",
"b u",
"a p",
"c en",
"d al",
"on a",
"s pe",
"g ni",
"b o",
"t t",
"del le",
"ques to",
"nel la",
"f f",
"d ere",
"an no",
"del l",
"un i",
"bb e",
"an ti",
"g ra",
"s p",
"en e",
"gi o",
"u to",
"qu al",
"gli a",
"qu ando",
"tu tto",
"c an",
"gli o",
"zi oni",
"ca m",
"h o",
"es so",
"s s",
"mo l",
"a t",
"lo ro",
"per ché",
"co sa",
"du e",
"po i",
"ca r",
"s co",
"ci o",
"to r",
"c co",
"c re",
"a m",
"g na",
"te m",
"pri ma",
"lu i",
"co sì",
"qu e",
"gu ar",
"ess ere",
"an i",
"con o",
"b ra",
"al le",
"m on",
"ri o",
"an co",
"cu i",
"s pi",
"vi a",
"g ran",
"gi or",
"a i",
"bi le",
"u l",
"ggi o",
"f e",
"an te",
"ma i",
"ta re",
"in ter",
"in di",
"re bbe",
"sen za",
"so lo",
"zi o",
"e d",
"en te",
"tu tti",
"sta to",
"zi a",
"d alla",
"tu ra",
"mi a",
"vi ta",
"quel la",
"qu a",
"ma r",
"do ve",
"g h",
"al lo",
"sem pre",
"zz o",
"si a",
"mo r",
"do po",
"por ta",
"d re",
"c cia",
"er ano",
"an ni",
"di o",
"chi a",
"en za",
"pro pri",
"qu i",
"m u",
"m b",
"an da",
"c ca",
"o cchi",
"ques ta",
"f fi",
"le i",
"par te",
"d on",
"r on",
"mi o",
"tan to",
"ri s",
"o gni",
"di s",
"r in",
"fa r",
"men ti",
"t el",
"anco ra",
"f ra",
"fa tto",
"man i",
"sen ti",
"p ra",
"tem po",
"es si",
"b bi",
"f in",
"a re",
"la re",
"per s",
"f on",
"b el",
"so r",
"d er",
"pre n",
"an za",
"di re",
"pi e",
"o ra",
"ver so",
"se gu",
"al tro",
"ta to",
"ca to",
"a to",
"vol ta",
"c c",
"fa re",
"pa re",
"ci ò",
"li b",
"bi li",
"n uo",
"s er",
"quel lo",
"co lo",
"p po",
"ca sa",
"tro va",
"o re",
"f er",
"r ono",
"d es",
"mol to",
"al mente",
"s ca",
"vo le",
"t ali",
"sul la",
"s ce",
"men o",
"an to",
"p un",
"s tu",
"ca pi",
"so l",
"gi u",
"m ini",
"m ano",
"z e",
"pi a",
"par ti",
"s al",
"la vo",
"ver o",
"r si",
"al tri",
"es ti",
"s cia",
"suo i",
"gli e",
"so tto",
"b ene",
"sc ri",
"t ale",
"de gli",
"n u",
"al c",
"uo mo",
"p el",
"f re",
"po te",
"es sa",
"s cu",
"si gno",
"el e",
"st ro",
"u ti",
"di a",
"si one",
"g re",
"f ini",
"ar ri",
"l un",
"c ri",
"e si",
"pa ssa",
"r à",
"men tre",
"an d",
"h anno",
"el o",
"u sci",
"gi a",
"gi à",
"di e",
"m ina",
"b e",
"ti ca",
"gior no",
"t in",
"es se",
"mo do",
"c al",
"s pa",
"propri o",
"l en",
"o ri",
"con tro",
"st ru",
"di ven",
"di sse",
"ra to",
"no i",
"v ere",
"pu ò",
"di ce",
"s an",
"es a",
"c ci",
"se con",
"re n",
"c cio",
"qual che",
"tu tta",
"g g",
"mon do",
"for ma",
"p li",
"m ma",
"pen sa",
"de va",
"tu r",
"fo sse",
"so pra",
"ta mente",
"n ess",
"qu anto",
"ra ga",
"un que",
"ca re",
"st re",
"gran de",
"pi cco",
"guar da",
"b en",
"nel l",
"a ff",
"po ssi",
"pre sen",
"r ò",
"pa ro",
"tu a",
"v in",
"an e",
"a s",
"ste sso",
"da v",
"ne i",
"nel le",
"gh i",
"pi o",
"ta r",
"an a",
"la to",
"si d",
"f ine",
"f uo",
"m er",
"z o",
"qua si",
"ul ti",
"i to",
"su e",
"si e",
"f il",
"allo ra",
"m in",
"ven i",
"t ano",
"el lo",
"d e",
"r a",
"e s",
"d o",
"e n",
"q u",
"c o",
"a s",
"o s",
"e r",
"a r",
"s e",
"qu e",
"a n",
"i n",
"i s",
"t o",
"ã o",
"t e",
"d a",
"m a",
"e l",
"t a",
"o r",
"i a",
"r e",
"e m",
"a l",
"co m",
"p a",
"o u",
"c a",
"u m",
"r o",
"v a",
"t i",
"s o",
"m en",
"n ão",
"h a",
"co n",
"m e",
"r i",
"pa ra",
"p o",
"d i",
"s a",
"v o",
"u ma",
"c i",
"n a",
"p or",
"n o",
"g u",
"s u",
"h o",
"an do",
"t ra",
"e i",
"v i",
"e u",
"i m",
"do s",
"el e",
"r es",
"m o",
"en t",
"f i",
"l a",
"e ra",
"l e",
"de s",
"el a",
"men te",
"l h",
"p er",
"l i",
"ç ão",
"m as",
"t er",
"m u",
"es t",
"v e",
"g o",
"l o",
"u s",
"ma is",
"v er",
"c ê",
"in ha",
"vo cê",
"f a",
"t u",
"c u",
"p ar",
"com o",
"p ro",
"s i",
"m os",
"e c",
"p re",
"d as",
"ç a",
"es ta",
"s er",
"u n",
"da de",
"d is",
"f o",
"e x",
"c h",
"i r",
"ra n",
"t ar",
"en te",
"g a",
"t r",
"p e",
"t os",
"b o",
"c ia",
"p en",
"c ar",
"s en",
"su a",
"se m",
"c as",
"f or",
"to u",
"n os",
"te m",
"r ia",
"m es",
"se u",
"co r",
"o n",
"a o",
"p os",
"ra m",
"v el",
"é m",
"t en",
"po de",
"t es",
"esta va",
"c e",
"b a",
"qu ando",
"m i",
"qu er",
"men to",
"se gu",
"t as",
"is so",
"mu i",
"g ar",
"t ro",
"d u",
"fa z",
"õ es",
"p es",
"an to",
"l u",
"p i",
"i x",
"ve z",
"s im",
"j a",
"p r",
"m in",
"b e",
"ra s",
"m an",
"p res",
"est á",
"c er",
"b re",
"p as",
"d ia",
"m b",
"dis se",
"n i",
"r os",
"es se",
"v ia",
"o lh",
"is a",
"an te",
"ê n",
"z a",
"qu i",
"b i",
"t inha",
"me u",
"s ão",
"m inha",
"a c",
"ri o",
"m ar",
"a t",
"p el",
"mui to",
"ta l",
"to r",
"fo i",
"h or",
"j o",
"b em",
"g i",
"f al",
"vo l",
"po n",
"di z",
"l ar",
"gu n",
"m or",
"r u",
"par ec",
"ç o",
"do r",
"pes so",
"n e",
"f er",
"b er",
"p u",
"po is",
"in a",
"es p",
"d ar",
"en do",
"de n",
"so bre",
"co s",
"p ri",
"al i",
"mes mo",
"ç ões",
"g ra",
"se us",
"me i",
"b ra",
"vi da",
"an tes",
"b ri",
"at é",
"ên cia",
"lh e",
"ti v",
"m ã",
"al g",
"qu anto",
"s ó",
"g os",
"de r",
"t ão",
"tu do",
"ent ão",
"r ou",
"es s",
"in da",
"b al",
"in do",
"ci o",
"n do",
"j á",
"va m",
"re i",
"l es",
"ei to",
"v is",
"tem po",
"de pois",
"c ha",
"m el",
"ch e",
"l ha",
"a inda",
"faz er",
"con tra",
"p ou",
"per gun",
"de ix",
"ta mb",
"ra r",
"al a",
"v en",
"t in",
"pel o",
"tamb ém",
"fi ca",
"pre c",
"el es",
"tra n",
"ha via",
"l á",
"to dos",
"j u",
"qu al",
"c an",
"ta do",
"cas a",
"es sa",
"n as",
"g em",
"m em",
"se i",
"na da",
"sen ti",
"c ri",
"ó s",
"de u",
"ei ro",
". .",
"f un",
"as sim",
"s ou",
"ent re",
"com e",
"i or",
"h ar",
"f e",
"por que",
"s or",
"f in",
"ta mente",
"a qui",
"cu l",
"t ó",
"for ma",
"s ar",
"ou tra",
"olh os",
"i ma",
"m im",
"a go",
"in s",
"co u",
"g ran",
"v al",
"pesso as",
"era m",
"ei ra",
"a que",
"com p",
"de i",
"p ela",
"co isa",
"m ão",
"con h",
"ca da",
"ago ra",
"ia m",
"h á",
"con s",
"su as",
"gu ém",
"o b",
"l an",
"es ti",
"á s",
"la do",
"in ter",
"ca be",
"por ta",
"n em",
"í vel",
"r is",
"j e",
"n un",
"sem pre",
"con segu",
"h as",
"tra bal",
"f u",
"le v",
"l em",
"l as",
"va i",
"tr os",
"t ante",
"te i",
"pr ó",
"que m",
"tu ra",
"on de",
"cabe ça",
"nun ca",
"men tos",
"h um",
"de le",
"ver dade",
"t á",
"h os",
"el i",
"ent es",
"m er",
"alg um",
"diz er",
"s in",
"pen as",
"n ós",
"en quanto",
"ou tro",
"l ho",
"es te",
"mel hor",
"est ar",
"g an",
"b ar",
"pri mei",
"a u",
"i u",
"pen sa",
"a penas",
"p ra",
"es tou",
"con te",
"res pon",
"ho mem",
"do is",
"a do",
"c al",
"a b",
"l os",
"ç as",
"pou co",
"sen hor",
"t ando",
"esp era",
"pa i",
"ri os",
"no i",
"i da",
"ba ix",
"as e",
"is as",
"f r",
"ho ra",
"mu ndo",
"pas sa",
"fi car",
"to do",
"se ja",
"al mente",
"â n",
"c lar",
"a d",
"in c",
"f os",
"lo n",
"g ri",
"ou vi",
"v em",
"g e",
"ta va",
"á rio",
"mo n",
"s os",
"in ho",
"ma l",
"t an",
"t re",
"gran de",
"ran do",
"b u",
"v ou",
"ê s",
"co isas",
"a conte",
"lh er",
"g en",
"ci on",
"an os",
"i do",
"tal vez",
"est ão",
"li v",
"sa b",
"su r",
"ou tros",
"c re",
"qual quer",
"g ou",
"t ri",
"l í",
"tiv esse",
"ra do",
"prec isa",
"mã e",
"su s",
"t anto",
"de la",
"men os",
"s al",
"en tra",
"p é",
"ma ior",
"noi te",
"ti va",
"p ala",
"so n",
"ra ção",
"de us",
"s as",
"un i",
"l or",
"u l",
"in te",
"f ei",
"an o",
"par ti",
"pala v",
"tr ás",
"par te",
"b el",
"ci dade",
"lu gar",
"v os",
"vez es",
"do u",
"en contra",
"tr u",
"e ci",
"a r",
"e r",
"a n",
"e n",
"i n",
"i r",
"o r",
"d e",
"a k",
"ı n",
"a l",
"d i",
"d a",
"b u",
"b ir",
"y or",
"i l",
"e k",
"y a",
"m a",
"l a",
"e l",
"u n",
"k a",
"l ar",
"i m",
"d ı",
"e t",
"o n",
"d u",
"o l",
"e y",
"t ı",
"m i",
"h a",
"b a",
"l er",
"ü n",
"m ı",
"i z",
"l e",
"ı r",
"m e",
"i s",
"n e",
"o k",
"t a",
"s a",
"u m",
"r a",
"g ö",
"i k",
"s ı",
"d en",
"e s",
"b il",
"t i",
"l ı",
"ü z",
"i ç",
"ü r",
"g i",
"u r",
"t e",
"b en",
"d an",
"i y",
"ı m",
"u z",
"v e",
"c ak",
"a y",
"c e",
"i ş",
"ın ı",
"i yor",
"ba ş",
"d ü",
"a t",
"a m",
"g el",
"de ğ",
"k ar",
"i ̇",
"m u",
"e v",
"ö y",
"bu n",
"v ar",
"ya p",
"s en",
"an a",
"s un",
"in i",
"gö r",
"y ı",
"k i",
"l i",
"ar a",
"al ı",
"on u",
"ç ı",
"ş ey",
"s ın",
"k ı",
"ka d",
"s e",
"t an",
"a ğ",
"değ il",
"s in",
"ü k",
"a z",
"ç ok",
"s on",
"ş ı",
"b i",
"ü l",
"t u",
"v er",
"iç in",
"g e",
"k en",
"ey e",
"ol du",
"mı ş",
"y e",
"k al",
"m ek",
"l an",
"öy le",
"yor du",
"er i",
"y üz",
"mi ş",
"b e",
"m ak",
"o la",
"in e",
"y an",
"h er",
"c ek",
"yor um",
"b ak",
"ü m",
"ö n",
"lar ı",
"o ğ",
"d er",
"kad ar",
"h al",
"ar ı",
"s t",
"s an",
"ın da",
"du r",
"g ün",
"v a",
"y ok",
"y er",
"dı m",
"k o",
"da ha",
"l u",
"ın a",
"di m",
"e m",
"bil ir",
"ik i",
"s iz",
"s i",
"n a",
"di ğ",
"s u",
"b ü",
"ha y",
"s or",
"dü ş",
"ü ç",
"un u",
"ö r",
"d ir",
"m ü",
"c a",
"am an",
"f ak",
"a da",
"e de",
"son ra",
"h iç",
"ak i",
"ğ ı",
"bu l",
"r u",
"ma z",
"an la",
"bu ra",
"ge ç",
"ma ya",
"l en",
"k onu",
"c i",
"c u",
"d in",
"t ek",
"z aman",
"el er",
"ö z",
"dı r",
"gi bi",
"o t",
"ş a",
"g er",
"ler i",
"k im",
"k u",
"fak at",
"y ar",
"gö z",
"c ı",
"yor sun",
"b ek",
"in de",
"r o",
"p ek",
"bun u",
"l ik",
"m an",
"il er",
"e di",
"ö l",
"s ür",
"b in",
"s ır",
"çı k",
"sı l",
"al ar",
"k es",
"y ak",
"ç ek",
"yı l",
"e cek",
"ı z",
"gi t",
"ka p",
"a ma",
"ı l",
"lar ın",
"b iz",
"tı r",
"o y",
"an cak",
"d oğ",
"ç a",
"b ana",
"ş im",
"baş la",
"l ü",
"ma dı",
"ben i",
"t ir",
"y ük",
"lı k",
"be ş",
"b el",
"b er",
"m er",
"na sıl",
"tı k",
"k e",
"t ür",
"a v",
". .",
"d aki",
"p ar",
"t er",
"ce ğ",
"t en",
"z ı",
"iy i",
"d ok",
"ben im",
"c ağ",
"n er",
"y en",
"ş u",
"me z",
"düş ün",
"ken di",
"şim di",
"y ol",
"y u",
"de v",
"is te",
"s ek",
"ma m",
"s öyle",
"di k",
"t o",
"k ur",
"oldu ğ",
"s ını",
"t ar",
"bil iyor",
"k an",
"y al",
"m eye",
"mu ş",
"f a",
"ka ç",
"bil e",
"iy e",
"t ü",
"e f",
"tı m",
"ev et",
"ç o",
"y et",
"g en",
"bura da",
"t im",
"bir az",
"es i",
"k or",
"doğ ru",
"in in",
"kı z",
"di ye",
"d ör",
"et ti",
"on un",
"is ti",
"ğ i",
"h e",
"s ana",
"ü ş",
"ar ka",
"hay ır",
"kar şı",
"h ar",
"il e",
"h ak",
"ı yor",
"ne den",
"s ev",
"sı z",
"ço cu",
"me m",
"ç alı",
"ol ur",
"b ır",
"g ir",
"is e",
"i h",
"c an",
"k ır",
"d ön",
"b öyle",
"sen i",
"! \"",
"al t",
"dör t",
"s öy",
"o ş",
"mu sun",
"la ş",
"h an",
"i p",
"ka y",
"h em",
"bü yük",
"a ç",
"bır ak",
"mi sin",
"s öz",
"u l",
"değ iş",
"ün ü",
"g ül",
"k ö",
"kar ı",
"ta mam",
"ol u",
"r ar",
"yen i",
"la m",
"mış tı",
"ya ş",
"al a",
"in iz",
"kad ın",
"bun un",
"m ey",
"al tı",
"y i",
"s o",
"in den",
"sen in",
"ya t",
"to p",
"s er",
"is i",
"d ün",
"s es",
"hiç bir",
"y on",
"d ın",
"t ün",
"baş ka",
"a s",
"he p",
"i t",
"ir mi",
"dev am",
"ola cak",
"ar tık",
"r e",
"dur um",
"im iz",
"üz el",
"ler ini",
"sa ğ",
"p ro",
"ger ek",
"y irmi",
"ş ek",
"ba ğ",
"me di",
"lar a",
"a h",
"t ur",
"y ür",
"ma sı",
"ka tı",
"de di",
"g ü",
"sor un",
"el i",
"ün e",
"mı z",
"yap ı",
"m il",
"ğ ını",
"t ara",
"m en",
"ha t",
"var dı",
"m et",
"konu ş",
"ar ak",
"lar ak",
"çocu k",
"bü tün",
"l ey",
"d ür",
"g üzel",
"ay ı",
"yap a",
"n ı",
"ay r",
"ö ne",
"yordu m",
"b an",
"i̇ ş",
"du m",
"un a",
"on a",
"yor lar",
"lar ını",
"çı kar",
"z an",
"se ç",
"l iyor",
"t ak",
"şı k",
"tek rar",
"a ş",
"e ş",
"miş ti",
"f ar",
"k in",
"im i",
"i f",
"e ğ",
"gi di",
"le ş",
"başla dı",
"gi de",
"ot ur",
"d de",
"ın dan",
"üz er",
"ın ın",
"n ız",
"u y",
"ye di",
"ka t",
"o larak",
"la dı",
"yal nız",
"ba h",
"iy et",
"m al",
"s ak",
"a çık",
"sın da",
".. .",
"in san",
"ay nı",
"e der",
"is tan",
"uz un",
"sa h",
"d o",
"g eri",
"er ek",
"ol an",
"ger çek",
"f en",
"al an",
"dı ş",
"alı k",
"far k",
"ü st",
"sa de",
"r i",
"k iş",
"l dı",
"z or",
"et ir",
"her kes",
"s al",
"ö mer",
"s el",
"un da",
"ha f",
"bun a",
"y dı",
"pek i",
"ada m",
"ha z",
"sın a",
"kap ı",
"gör üş",
"sade ce",
"al dı",
"gel di",
"i e",
"n ie",
"n a",
"r z",
"s z",
"c z",
"p o",
"s t",
"c h",
"i ę",
"d z",
"n i",
"a ł",
"r a",
"j e",
"r o",
"d o",
"s ię",
"z a",
"g o",
"e m",
"w i",
"c i",
"rz e",
"k o",
"l e",
"l i",
"w a",
"t o",
"k a",
"m i",
"ż e",
"t a",
"w ie",
"b y",
"m o",
"w y",
"rz y",
"ł a",
"j a",
"n o",
"ł o",
"w o",
"p a",
"m a",
"t e",
"t y",
"n y",
"k i",
"d a",
"n e",
"dz ie",
"dz i",
"cz y",
"c ie",
"m y",
"p rze",
"d y",
"o d",
"l a",
"k ie",
"r y",
"st a",
"j ą",
"ó w",
"c e",
"p rzy",
"c o",
"k u",
"m ie",
"sz y",
"cz e",
"r e",
"b a",
"s i",
"b ie",
"m u",
"w e",
"c y",
"ni a",
"ś ci",
"sz e",
"je st",
"k t",
"s a",
"b o",
"t u",
"ż y",
"n ą",
"b i",
"r u",
"a le",
"kt ó",
"p ra",
"ał a",
"m nie",
"p ie",
"ł y",
"cz a",
"ja k",
"ro z",
"r ó",
"l u",
"z na",
"g a",
"ra z",
"ł u",
"ta k",
"j u",
"p i",
"ś ć",
"s o",
"wi a",
"m ó",
"ch o",
"w szy",
"p e",
"s po",
"c a",
"g dy",
"w ał",
"w ię",
"d e",
"b e",
"p ro",
"ł em",
"j ę",
"s k",
"z e",
"l o",
"g i",
"r ę",
"do b",
"d u",
"ju ż",
"st o",
"b ę",
"ał em",
"sz a",
"m e",
"po d",
"d la",
"pa n",
"n ę",
"z o",
"mo że",
"ś li",
"s ie",
"ał o",
"t em",
"l ko",
"ny ch",
"po wie",
"c ię",
"s u",
"ty lko",
"i n",
"b u",
"na j",
"ch a",
"te go",
"p u",
"s ki",
"ne go",
"wszy st",
"sz cze",
"je d",
"je j",
"t wo",
"ą d",
"ś my",
"cz ę",
"wa ć",
"je go",
"ż a",
"i m",
"s y",
"pra w",
"ty m",
"któ ry",
"ał y",
"t rze",
"nie j",
"s e",
"ny m",
"i ch",
"o b",
". .",
"g ło",
"ją c",
"mó wi",
"s ka",
"o n",
"ne j",
"s łu",
"w ła",
"bę dzie",
"d ę",
"p ó",
"be z",
"ni c",
"p ła",
"ś cie",
"mi a",
"s ą",
"t rzy",
"kie m",
"by ł",
"mo g",
"ro bi",
"ta m",
"c u",
"te n",
"m ię",
"z y",
"pe w",
"ci a",
"my ś",
"prze d",
"s ko",
"n u",
"któ re",
"a l",
"l ę",
"w sze",
"ą c",
"by ło",
"so bie",
"p y",
"ci ą",
"ba r",
"je szcze",
"h a",
"t ę",
"b ra",
"cza s",
"sz ę",
"g ł",
"k ę",
"ma r",
"cz u",
"prze z",
"f i",
"s ło",
"w z",
"k to",
"k ów",
"cz o",
"li śmy",
"st ra",
"wię c",
"r ą",
"ma m",
"w ó",
"rz a",
"g ro",
"no ści",
"f a",
"we t",
"ną ł",
"ś mie",
"na wet",
"mu si",
"s wo",
"te j",
"w ą",
"w u",
"wi ą",
"ni u",
"cz ą",
"b li",
"dz o",
"s kie",
"n em",
"je śli",
"cze go",
"ch y",
"d ł",
"ty ch",
"by m",
"ż o",
"e ś",
"si ą",
"kie dy",
"na s",
"w ró",
"dz e",
"d ro",
"t ra",
"r ów",
"pa ni",
"z ie",
"ku l",
"na d",
"ch wi",
"ni m",
"t ro",
"by ć",
"cho dzi",
"ni o",
"dob rze",
"te raz",
"wo kul",
"co ś",
"k ł",
"pie r",
"h e",
"g dzie",
"dz y",
"p ię",
"d ź",
"k ą",
"g ó",
"z da",
"ch ce",
"st ę",
"o r",
"ś wia",
"wszyst ko",
"st ro",
"pe ł",
"wie m",
"wie l",
"ka ż",
"ki m",
"rz u",
"s ły",
"jed na",
"z u",
"myś l",
"mó j",
"g u",
"wa r",
"jest em",
"ó ż",
"mie j",
"mo ż",
"k ła",
"re sz",
"d łu",
"st wo",
"n ię",
"ma sz",
"że by",
"nie m",
"ja kie",
"st y",
"ni ą",
"we j",
"o j",
"g ra",
"s ła",
"no ść",
"z ło",
"sz czę",
".. .",
"r i",
"le j",
"we go",
"c ał",
"dzi ał",
"ki ch",
"dz a",
"dz ię",
"o czy",
"zo sta",
"cz ło",
"na m",
"ki l",
"o na",
"sz u",
"w ę",
"pa r",
"mi ał",
"st rze",
"ce j",
"e j",
"zna j",
"da ć",
"miej s",
"k ró",
"k ry",
"bar dzo",
"si a",
"z i",
"ś nie",
"l ą",
"g ie",
"cie bie",
"d ni",
"st u",
"po trze",
"wokul ski",
"u wa",
"u mie",
"jedna k",
"k ra",
"wró ci",
"czło wie",
"czy ć",
"by ła",
"że li",
"m ę",
"c ę",
"z robi",
"mog ę",
"pro wa",
"r em",
"nie ch",
"cz nie",
"k ro",
"t ą",
"ch ci",
"b ro",
"dzie ć",
"sz ą",
"pa d",
"t rz",
"t ru",
"je m",
"a ni",
"t ów",
"a r",
"d ru",
"ta j",
"rze kł",
"sa m",
"st e",
"nie go",
"ta kie",
"w ała",
"to wa",
"ka pła",
"wi dzi",
"po dob",
"dz ę",
"t ał",
"stę p",
"b ą",
"po ko",
"w em",
"g ę",
"a by",
"g e",
"al bo",
"s pra",
"z no",
"de n",
"s mo",
"je sz",
"k się",
"jest eś",
"po z",
"ni gdy",
"k sią",
"c óż",
"w s",
"po w",
"t ka",
"ś wie",
"sz ka",
"sa mo",
"s ł",
"rz ę",
"na le",
"chce sz",
"ni k",
"p ę",
"chy ba",
"cią g",
"ją cy",
"wo j",
"na sze",
"mnie j",
"wię cej",
"z wy",
"o sta",
"f e",
"wa ż",
"h o",
"se r",
"śmie r",
"wie r",
"dz ą",
"za ś",
"gdy by",
"ja ki",
"wo l",
"wi n",
"d ą",
"ści a",
"roz ma",
"wa l",
"pa nie",
"sta r",
"ka z",
"je żeli",
"d em",
"w ra",
"ko ń",
"sie bie",
"zno wu",
"p ró",
"cz em",
"st wa",
"i sto",
"pó ł",
"d ał",
"ko bie",
"ała m",
"wy ch",
"ce sa",
"ni ch",
"za wsze",
"dzi ć",
"te ż",
"le pie",
"pro szę",
"k re",
"t wa",
"o t",
"ł ą",
"ch u",
"c ą",
"p rz",
"ł e",
"sze dł",
"od powie",
"my śli",
"ś wią",
"e n",
"e r",
"d e",
"a n",
"e t",
"i j",
"i n",
"e l",
"a a",
"s t",
"o r",
"g e",
"i s",
"a t",
"i e",
"c h",
"o n",
"e en",
"h et",
"i t",
"v er",
"aa r",
"a l",
"o or",
"g en",
"v an",
"o p",
"d en",
"h e",
"o m",
"t e",
"w e",
"i k",
"r e",
"z e",
"ij n",
"d at",
"b e",
"d er",
"in g",
"o e",
"ij k",
"a an",
"ch t",
"v oor",
"l e",
"i et",
"r o",
"m o",
"k en",
"z ijn",
"m en",
"i g",
"j e",
"n iet",
"a r",
"o o",
"i d",
"u n",
"i l",
"s ch",
"mo et",
"st e",
"u r",
"o l",
"he b",
"u it",
"g el",
"w ij",
"a s",
"m e",
"t en",
"w or",
"o u",
"v en",
"l en",
"aa t",
"d it",
"m et",
"r a",
"b en",
"s p",
"o ver",
"d ie",
"n o",
"w er",
"l ijk",
"f t",
"s l",
"an d",
"v e",
"t er",
"i er",
"i en",
"t o",
"d aar",
"g r",
"b el",
"de ze",
"d u",
"a g",
"k an",
"wor den",
"in gen",
"moet en",
"n en",
"on der",
"heb ben",
"r u",
"oo k",
"s en",
"c t",
"k t",
"no g",
"aa l",
"w as",
"u l",
"e er",
"b ij",
"m ijn",
"p ro",
"v ol",
"d o",
"k om",
"at ie",
"e ft",
"k el",
"al s",
"r ij",
"he id",
"a f",
"st el",
"m aar",
"a p",
"we e",
"a d",
"he eft",
"w aar",
"i cht",
"d an",
"er en",
"n e",
"w el",
"w at",
"w il",
"a cht",
"aa g",
"ge b",
"c on",
"z o",
"k e",
"b et",
"h ij",
"d ig",
"k un",
"u w",
"d t",
"d oor",
"t ij",
"a m",
"an g",
"on d",
"er s",
"is ch",
"ge en",
"i ge",
"ge v",
"ve el",
"n u",
"m a",
"on s",
"o f",
"b l",
"n aar",
"g ro",
"p l",
"an der",
"at en",
"kun nen",
"e cht",
"h ier",
"g oe",
"an t",
"u s",
"t wee",
"on t",
"de lijk",
"el e",
"u ur",
"al le",
"t oe",
"me er",
"i st",
"n a",
"n ie",
"on ze",
"l o",
"i m",
"p en",
"h ad",
"tij d",
"h oe",
"to t",
"z ou",
"a k",
"aa k",
"a men",
"d r",
"w oor",
"s e",
"wor dt",
"o t",
"gel ijk",
"g aan",
"i c",
"g er",
"k er",
"el d",
"e m",
"h ou",
"de l",
"z en",
"z el",
"te gen",
"b o",
"kom en",
"c om",
"i gen",
"e it",
"wer k",
"goe d",
"z al",
"z ij",
"sl ag",
"e s",
"z ien",
"a st",
"echt er",
"it ie",
"t ie",
"el ijk",
"m is",
"isch e",
"bel an",
"h aar",
"i ch",
"b er",
"h an",
"v r",
"al e",
"c i",
"gr ijk",
"in d",
"do en",
"l and",
"belan grijk",
"p un",
"op en",
"ct ie",
"zel f",
"m ij",
"it eit",
"ste m",
"me e",
"ar en",
"al l",
"b r",
"re cht",
"d ien",
"h u",
"g aat",
"pro b",
"m oe",
"p er",
"a u",
"ul len",
"z ich",
"daar om",
"or m",
"k l",
"v o",
"en t",
"st aat",
"z it",
"du i",
"n at",
"du s",
"d s",
"ver slag",
"kel ijk",
"prob le",
"w et",
"ge m",
"c r",
"i on",
"p r",
"sch ap",
"g d",
"h un",
"z a",
"er d",
"z et",
"st aan",
"st r",
"m aal",
"in der",
"e id",
"st en",
"p ar",
"k ken",
"ge d",
"z ullen",
"re s",
"men sen",
"j aar",
"re gel",
"ie der",
"vol gen",
"ge ven",
"e ven",
"l u",
"bl ij",
"i ë",
"k o",
"u we",
"m an",
"ma ken",
"l ie",
"g a",
"oe k",
"nie uwe",
"b aar",
"h o",
"h er",
"in ter",
"ander e",
"ru ik",
"s u",
"a gen",
"or t",
"m er",
"ou w",
"st er",
"wil len",
"aa kt",
"h oo",
"an den",
"f f",
"l ig",
"t re",
"s amen",
"ze er",
"dui delijk",
"ant woor",
"he el",
"men t",
"pun t",
"hou den",
"we g",
"vr aag",
"gel e",
"een s",
"be sch",
"om en",
"er g",
"do el",
"d ag",
"sp e",
"ur en",
"ing s",
"or en",
"l ang",
"de len",
"m ar",
"ste un",
"in nen",
"p ol",
"o on",
"i de",
"s n",
"s ie",
"r icht",
"z onder",
"no dig",
"all een",
"m id",
"ra gen",
"iet s",
"ver sch",
"geb ruik",
"st u",
"ro uw",
"stel len",
"be g",
"men ten",
"v in",
"eer ste",
"l aat",
"gro ot",
"oo d",
"to ch",
"l aten",
"aar d",
"s le",
"de el",
"st and",
"pl aat",
"re e",
"bet re",
"d i",
"l id",
"uit en",
"ra cht",
"bel eid",
"g et",
"ar t",
"st ie",
"st aten",
"g gen",
"re ken",
"e in",
"al en",
"m ing",
"mo gelijk",
"gro te",
"al tijd",
"z or",
"en kel",
"w ik",
"pol itie",
"e igen",
"el k",
"han del",
"g t",
"k we",
"m aat",
"el en",
"i p",
"v rij",
"s om",
"je s",
"aa m",
"hu is",
"v al",
"we er",
"lid staten",
"k ing",
"k le",
"be d",
"gev al",
"stel l",
"a i",
"wik kel",
"kwe stie",
"t al",
"ste e",
"a b",
"h el",
"kom st",
"p as",
"s s",
"it u",
"i den",
"eer d",
"m in",
"c e",
"p o",
"twee de",
"proble em",
"w aren",
"us sen",
"sn el",
"t ig",
"ge w",
"j u",
"ul t",
"ne men",
"com mis",
"versch il",
"k on",
"z oek",
"k rij",
"gr aag",
"den k",
"l anden",
"re den",
"be sl",
"oe g",
"bet er",
"he den",
"m ag",
"p e",
"bo ven",
"a c",
"con t",
"f d",
"h ele",
"k r",
"v ier",
"w in",
"ge z",
"k w",
"m il",
"v or",
"he m",
"ra m",
"aa s",
"ont wikkel",
"dr ie",
"v aak",
"plaat s",
"l a",
"g ang",
"ij f",
"f in",
"nat uur",
"t ussen",
"u g",
"in e",
"d a",
"b at",
"kom t",
"w acht",
"aa d",
"u t",
"é n",
"acht er",
"geb ie",
"ver k",
"lig t",
"c es",
"nie uw",
"van d",
"s t",
"n í",
"j e",
"p o",
"c h",
"r o",
"n a",
"s e",
"t o",
"n e",
"l e",
"k o",
"l a",
"d o",
"r a",
"n o",
"t e",
"h o",
"n ě",
"v a",
"l i",
"l o",
"ř e",
"c e",
"d e",
"v e",
"b y",
"n i",
"s k",
"t a",
"n á",
"z a",
"p ro",
"v o",
"v ě",
"m e",
"v á",
"s o",
"k a",
"r á",
"v y",
"z e",
"m i",
"p a",
"t i",
"st a",
"m ě",
"n é",
"ř i",
"ř í",
"m o",
"ž e",
"m a",
"j í",
"v ý",
"j i",
"d ě",
"r e",
"d a",
"k u",
"j a",
"c i",
"r u",
"č e",
"o b",
"t ě",
"m u",
"k y",
"d i",
"š e",
"k é",
"š í",
"t u",
"v i",
"p ře",
"v í",
"s i",
"n ý",
"o d",
"so u",
"v é",
"n y",
"r i",
"d y",
"b u",
"b o",
"t y",
"l á",
"l u",
"n u",
"ž i",
"m á",
"st i",
"c í",
"z á",
"p ra",
"sk é",
"m í",
"c o",
"d u",
"d á",
"by l",
"st o",
"s a",
"t í",
"je d",
"p ří",
"p ři",
"t é",
"s í",
"č i",
"v ní",
"č a",
"d í",
"z i",
"st u",
"p e",
"b a",
"d ní",
"ro z",
"va l",
"l í",
"s po",
"k á",
"b e",
"p i",
"no u",
"ta k",
"st e",
"r y",
"l é",
"vě t",
"se m",
"p ě",
"ko n",
"ne j",
"l y",
"ko u",
"ý ch",
"b ě",
"p r",
"f i",
"p rá",
"a le",
"ja ko",
"po d",
"ž í",
"z í",
"j sou",
"j sem",
"ch o",
"l ní",
"c ké",
"t á",
"m y",
"a k",
"h u",
"va t",
"pře d",
"h la",
"k e",
"st á",
"č í",
"š i",
"s le",
"k la",
"š tě",
"lo u",
"m ů",
"z na",
"ch á",
"o r",
"p ů",
"h a",
"b i",
"ta ké",
"d ů",
"no st",
"t ře",
"te r",
"p u",
"i n",
"v r",
"ve l",
"sk u",
"v še",
"t ní",
"do b",
"by la",
"č ní",
"ja k",
"v u",
"je ho",
"b ý",
"vá ní",
"ný ch",
"po u",
"te n",
"t ři",
"v z",
"st ře",
"d va",
"h le",
"č á",
"no sti",
"c k",
"v š",
"vo u",
"s u",
"h e",
"h ra",
"je n",
"s y",
"da l",
"po z",
"s lo",
"te l",
"d ru",
"de n",
"vš ak",
"g i",
"k dy",
"by lo",
"bu de",
"st ra",
"j ší",
"m é",
"me n",
"vý ch",
"ní m",
"s m",
"ko li",
"r ů",
"t ra",
"mů že",
"ne ní",
"ho d",
"b í",
"do u",
"sk a",
"t ý",
"st ě",
"u je",
"s á",
"pě t",
"ne s",
"k rá",
"to m",
"st ví",
"v ně",
"se d",
"s vé",
"p í",
"z o",
"mu sí",
"u ž",
"tí m",
"jí cí",
"jed no",
"t r",
"ča s",
"e v",
"č ty",
"sk ý",
"ni c",
"ev ro",
"to ho",
"h y",
"k ter",
"r ní",
"st í",
"s vě",
"pa k",
"vše ch",
"k ů",
"n g",
"á d",
"chá zí",
"a ni",
"a r",
"jed na",
"bý t",
"t ro",
"k ra",
"pr vní",
"m no",
"ské ho",
"p á",
"p la",
"le m",
"ne bo",
"ke m",
"st ro",
"s la",
"né ho",
"z de",
"dal ší",
"ř a",
"čty ři",
"h rá",
"dru h",
"l ně",
"v la",
"sk ých",
"š ko",
"pů so",
"pro to",
"v ů",
"sk á",
"ve n",
"še st",
"d ně",
"je ště",
"me zi",
"te k",
"s ko",
"ch a",
"ně koli",
"be z",
"g ra",
"ji ž",
"č ně",
"j á",
"s lu",
"z ná",
"ve r",
"sed m",
"k ro",
"ta m",
"a no",
"v lá",
"o sm",
"byl y",
"vá m",
"ck ý",
"te ch",
"dě ji",
"vel mi",
"le ži",
"va la",
"l ý",
"t vo",
"spo le",
"ch u",
"stu p",
"mo ž",
"evro p",
"g e",
"sta l",
"j de",
"ch y",
"ro di",
"je jí",
"po li",
"de vět",
"s me",
"a ž",
"té to",
"re m",
"d é",
"f or",
"u ni",
"f o",
"ten to",
"a u",
"ka ž",
"nu la",
"na d",
"by ch",
"mo c",
"sto u",
"e x",
"le n",
"k do",
"z d",
"pra co",
"to mu",
"ný m",
"ži vo",
"ze m",
"f e",
"f u",
"ná sle",
"j o",
"sk y",
"ji ch",
"h á",
"mě l",
"dě la",
"j sme",
"p re",
"ni ce",
"ste j",
"ne m",
"st ní",
"he m",
"ná ro",
"z u",
"b li",
"ni t",
"pa r",
"a l",
"poz ději",
"ta ko",
"n ce",
"če r",
"ší m",
"ně co",
"vá l",
"ře j",
"krá t",
"á lní",
"u r",
". .",
"a si",
"kter é",
"sta v",
"ma jí",
"my s",
"do bě",
"s ně",
"ce n",
"z y",
"z ku",
"t ů",
"ch od",
"s pě",
"je jich",
"sou čas",
"d r",
"va li",
"ri e",
"k te",
"pr ů",
"ze ní",
"pa t",
"a n",
"po tře",
"de m",
"d nes",
"ze mí",
"sa mo",
"zna m",
"b ra",
"má m",
"te dy",
"g o",
"hla vní",
"pou ží",
"b ní",
"ve de",
"le p",
"je k",
"pra v",
"poli ti",
"d ne",
"je m",
"le t",
"če ní",
"pro b",
"ne ž",
"dě l",
"fi l",
"č o",
"cí ch",
"st é",
"d lou",
"h i",
"a by",
"to u",
"několi k",
"d la",
"vy u",
"vi t",
"ho u",
"ck ých",
"no vé",
"či n",
"st y",
"dě lá",
"k ý",
"ob la",
"pod le",
"ra n",
"dů leži",
"ta to",
"po ku",
"ko ne",
"d ý",
"d vě",
"ž ád",
"nou t",
"t ku",
"t vr",
"cké ho",
"ro v",
"r é",
"te le",
"p sa",
"s vět",
"ti vní",
"do sta",
"te m",
"še l",
"druh é",
"s kou",
"ž o",
"jed ná",
"vý znam",
"prob lé",
"pu bli",
"vá n",
"od po",
"pod po",
"d le",
"ja ké",
"še ní",
"ví m",
"bě hem",
"na chází",
"s lou",
"pou ze",
"o tá",
"p lo",
"to vé",
"vět ši",
"ko mi",
"va jí",
"ty to",
"zá pa",
"z mě",
"mo h",
"ví ce",
"spole č",
"au to",
"pro ti",
"st ru",
"dě t",
"chá ze",
"že l",
"с т",
"е н",
"н о",
"н а",
"п р",
"т о",
"п о",
"р а",
"г о",
"к о",
"н е",
"в о",
"в а",
"е т",
"е р",
"н и",
"е л",
"и т",
"н ы",
"з а",
"р о",
"ен и",
"к а",
"л и",
"е м",
"д а",
"о б",
"л а",
"д о",
"с я",
"т ь",
"о т",
"л о",
"л ь",
"е д",
"с о",
"м и",
"р е",
"м о",
"ц и",
"пр о",
"т а",
"э то",
"к и",
"р у",
"пр и",
"т и",
"с е",
"ст а",
"в ы",
"м ы",
"в и",
"б ы",
"м а",
"е с",
"л я",
"ст и",
"л е",
"ч то",
"м е",
"р и",
"ч а",
"о д",
"е й",
"ел ь",
"ени я",
"г а",
"н у",
"с и",
"п а",
"ра з",
"б о",
"ст о",
"с у",
"с а",
"д у",
"е го",
"е ст",
"и н",
"ит ь",
"и з",
"ж е",
"м у",
"п ер",
"по д",
"ени е",
"с ь",
"к у",
"пр ед",
"но го",
"ны х",
"в ер",
"т е",
"но й",
"ци и",
"д е",
"р ы",
"д ел",
"л ю",
"в е",
"о н",
"м ен",
"г и",
"н я",
"б у",
"пр а",
"в се",
"ет ся",
"ст ь",
"ж а",
"до л",
"ж и",
"б е",
"ко н",
"с л",
"ш и",
"д и",
"ст в",
"с ко",
"ны е",
"ч и",
"ю т",
"д ер",
"ст ра",
"т ы",
"х од",
"щ и",
"з о",
"з на",
"но сти",
"ч ес",
"в ля",
"ва ть",
"о р",
"по л",
"в ет",
"та к",
"ш а",
"т у",
"с во",
"пр е",
"о на",
"ит ель",
"ны й",
"с ло",
"ка к",
"в л",
"но сть",
"х о",
"мо ж",
"п е",
"д ля",
"ни я",
"но е",
"ра с",
"дол ж",
"да р",
"т ель",
"с ка",
"п у",
"ст во",
"ко то",
"ра б",
"е е",
"ро д",
"э ти",
"с об",
"о ру",
"ж ен",
"ны м",
"ит и",
"ни е",
"ко м",
"д ет",
"ст у",
"г у",
"п и",
"ме ж",
"ени ю",
"т ер",
"раб от",
"во з",
"ци я",
"ко й",
"щ ест",
"г ра",
"з и",
"р я",
"меж ду",
"ст ва",
"в с",
"ел о",
"ш е",
"м ер",
"б а",
"з ы",
"л у",
"а ль",
"д ей",
"г ла",
"на род",
"к ти",
"пред ста",
"л ся",
"я вля",
"с ки",
"но в",
"ед ин",
"ро в",
"и с",
"ни ма",
"р ем",
"ход и",
"так же",
"д ру",
"а ть",
"сл ед",
"го во",
"на я",
"ю щи",
"ен ь",
"кото ры",
"х от",
"в у",
"и х",
"ем у",
"ч ит",
"ва ж",
"ор га",
"чес ки",
"щ е",
"к е",
"х а",
"по с",
"то м",
"бо ль",
"м не",
"па с",
"об ъ",
"пра в",
"кон ф",
"сл у",
"под дер",
"ст ви",
"на ш",
"ль ко",
"сто я",
"ну ю",
"л ем",
"ен ных",
"к ра",
"д ы",
"между народ",
"г да",
"не об",
"го су",
"ств у",
"ени и",
"госу дар",
"к то",
"и м",
"ч ест",
"р ет",
"во про",
"л ен",
"ел и",
"ро ва",
"ци й",
"на м",
"это й",
"ж ения",
"необ ходи",
"мен я",
"бы ло",
"си ли",
"ф и",
"в я",
"ш ь",
"это го",
"о ни",
"орга ни",
"бе зо",
"пр об",
"и ме",
"ре ш",
"б и",
"безо пас",
"ют ся",
"о ста",
"ен но",
"го д",
"ел а",
"предста в",
"ть ся",
"сло во",
"органи за",
"долж ны",
"это м",
"б ла",
"ч е",
"ч у",
"бла го",
"это му",
"в рем",
"с пе",
"но м",
"ени й",
"с по",
"на с",
"не т",
"з у",
"в ед",
"е ще",
"ска за",
"се й",
"ер ен",
"да н",
"са м",
"ел я",
"ра н",
"зы ва",
"явля ется",
"бу дет",
"кти в",
"т ре",
"дел е",
"м от",
"конф ерен",
"ла сь",
"ча с",
"сто ро",
"ко го",
"е з",
"не й",
"о с",
"ли сь",
"раз ору",
"пер е",
"с си",
"ны ми",
"про ц",
"го ло",
"ч ело",
"бо ле",
"чело ве",
"с ер",
"п л",
"ч ет",
"стра н",
"п я",
"бы л",
"к ла",
"то в",
"ж д",
"дел а",
"е ра",
"у же",
"со вет",
"г ен",
"безопас ности",
"ц а",
"се да",
"по з",
"от вет",
"проб лем",
"на ко",
"т ем",
"до ста",
"п ы",
"щ а",
"во й",
"су щест",
"необходи мо",
"бы ть",
"мож ет",
"д ем",
"что бы",
"е к",
"ч ер",
"у сили",
"ре с",
"ру д",
"един енных",
"д об",
"до сти",
"ств ен",
"я дер",
"год ня",
"ка за",
"се годня",
"сей час",
"то лько",
"во д",
"ес ь",
"м ного",
"бу ду",
"е в",
"ест ь",
"т ри",
"об щест",
". .",
"я вл",
"вы сту",
"р ед",
"с чит",
"с ит",
"деле га",
"ло ж",
"это т",
"ф ор",
"к лю",
"воз мож",
"ва ния",
"б ли",
"и ли",
"в з",
"на ций",
"ско го",
"при ня",
"п ла",
"о ч",
"ить ся",
"ст е",
"на ши",
"которы е",
"а р",
"име ет",
"с от",
"зна ч",
"пер ь",
"след у",
"ен ы",
"та ки",
"объ единенных",
"ст ро",
"те перь",
"б ле",
"благо дар",
"раз в",
"а н",
"жи ва",
"оч ень",
"я т",
"бе з",
"об ес",
"г ро",
"ло сь",
"с ы",
"организа ции",
"ч лен",
"то го",
"она ль",
"ж да",
"все х",
"с вя",
"боле е",
"со в",
"ко гда",
"во т",
"к ре",
"к ры",
"по этому",
"во ль",
"о й",
"ген ера",
"ч ем",
"л ы",
"пол ити",
"в ен",
"конферен ции",
"проц ес",
"б я",
"ит е",
"от но",
"разв ити",
"а ф",
"ю щ",
"в но",
"ми р",
"ни и",
"ка я",
"а с",
"итель но",
"в то",
"ени ем",
"генера ль",
"пр от",
"вс ем",
"сам бле",
"ас самбле",
"о м",
"з д",
"с мот",
"ре ги",
"ч его",
"од нако",
"усили я",
"дей стви",
"ч но",
"у ча",
"об раз",
"во с",
"э та",
"пер его",
"гово р",
"ва м",
"мо ло",
"врем я",
"д ь",
"хот ел",
"г ру",
"за явл",
"пре доста",
"по ль",
"не е",
"ре зо",
"перего во",
"резо лю",
"к рет",
"поддер ж",
"обес пе",
"не го",
"представ ит",
"на де",
"к ри",
"ч ь",
"про ек",
"л ет",
"дру ги",
"ا ل",
"َ ا",
"و َ",
"ّ َ",
"ِ ي",
"أ َ",
"ل َ",
"ن َ",
"ال ْ",
"ه ُ",
"ُ و",
"م ا",
"ن ْ",
"م ن",
"ع َ",
"ن ا",
"ل ا",
"م َ",
"ت َ",
"ف َ",
"أ ن",
"ل ي",
"م ِ",
"ا ن",
"ف ي",
"ر َ",
"ي َ",
"ه ِ",
"م ْ",
"ق َ",
"ب ِ",
"ل ى",
"ي ن",
"إ ِ",
"ل ِ",
"و ا",
"ك َ",
"ه ا",
"ً ا",
"م ُ",
"و ن",
"ال م",
"ب َ",
"ي ا",
"ذ ا",
"س ا",
"ال ل",
"م ي",
"ي ْ",
"ر ا",
"ر ي",
"ل ك",
"م َا",
"ن َّ",
"ل م",
"إ ن",
"س ت",
"و م",
"ّ َا",
"ل َا",
"ه م",
"ّ ِ",
"ك ُ",
"ك ان",
"س َ",
"ب ا",
"د ي",
"ح َ",
"ع ْ",
"ب ي",
"ال أ",
"و ل",
"ف ِي",
"ر ِ",
"د ا",
"مِ نْ",
"ُو نَ",
"و ْ",
"ه َا",
"ّ ُ",
"ال س",
"ال َ",
"ن ي",
"ل ْ",
"ت ُ",
"ه ل",
"ر ة",
"د َ",
"س ْ",
"ت ِ",
"ن َا",
"ر ْ",
"الل َّ",
"سا مي",
"ك ن",
"ك ل",
"ه َ",
"عَ لَ",
"ع لى",
"م ع",
"إ لى",
"ق د",
"ال ر",
"ُو ا",
"ي ر",
"ع ن",
"ي ُ",
"ن ِ",
"ب ْ",
"ال ح",
"هُ مْ",
"ق ا",
"ذ ه",
"ال ت",
"ِي نَ",
"ج َ",
"ه ذا",
"ع د",
"ال ع",
"د ْ",
"قَ الَ",
"ر ُ",
"ي م",
"ي ة",
"ن ُ",
"خ َ",
"ر ب",
"ال ك",
"و َا",
"أ نا",
"ة ِ",
"ال ن",
"ح د",
"ع ِ",
"ت ا",
"ه و",
"ف ا",
"ع ا",
"ال ش",
"ل ُ",
"ي ت",
"ذ َا",
"ي ع",
"ال ذ",
"ح ْ",
"ال ص",
"إِ نَّ",
"ج ا",
"ع لي",
"ك َا",
"ب ُ",
"ت ع",
"و ق",
"م ل",
"ل َّ",
"ي د",
"أ خ",
"ر ف",
"ت ي",
"ال ِ",
"ّ ا",
"ذ لك",
"أَ نْ",
"س ِ",
"ت وم",
"م ر",
"مَ نْ",
"ب ل",
"ال ق",
"الل ه",
"ِي َ",
"ك م",
"ذ َ",
"ع ل",
"ح ب",
"س ي",
"ع ُ",
"ال ج",
"ال د",
"ش َ",
"ت ك",
"ف ْ",
"ص َ",
"ل ل",
"د ِ",
"ب ر",
"ف ِ",
"ت ه",
"أ ع",
"ت ْ",
"ق ْ",
"الْ أَ",
"ئ ِ",
"عَ نْ",
"و ر",
"ح ا",
"ال َّ",
"م ت",
"ف ر",
"د ُ",
"ه نا",
"وَ أَ",
"ت ب",
"ة ُ",
"أ ي",
"س ب",
"ري د",
"و ج",
"كُ مْ",
"ح ِ",
"ك ْ",
"د ر",
"َا ء",
"ه ذه",
"ال ط",
"الْ مُ",
"د ة",
"ق ل",
"غ َ",
"ي وم",
"الَّ ذ",
"ك ر",
"ت ر",
"ك ِ",
"ك ي",
"عَلَ ى",
"رَ ب",
"ع ة",
"ق ُ",
"ج ْ",
"ف ض",
"ل ة",
"ه ْ",
"ر َا",
"وَ لَ",
"الْ مَ",
"أَ نَّ",
"ي َا",
"أ ُ",
"ش ي",
"اللَّ هُ",
"لَ ى",
"ق ِ",
"أ ت",
"عَلَ يْ",
"اللَّ هِ",
"ال ب",
"ض َ",
"ة ً",
"ق ي",
"ا ر",
"ب د",
"خ ْ",
"سْ تَ",
"ط َ",
"قَ دْ",
"ذه ب",
"أ م",
"ما ذا",
"وَ إِ",
"ة ٌ",
"و نَ",
"لي لى",
"و لا",
"ح ُ",
"ه ي",
"ص ل",
"ال خ",
"و د",
"لي س",
"ل دي",
"ق ال",
"كَا نَ",
"م َّ",
"ح ي",
"ت م",
"ل ن",
"وَ لَا",
"ب ع",
"يم كن",
"س ُ",
"ة َ",
"ح ت",
"ر ًا",
"ك ا",
"ش ا",
"هِ مْ",
"لَ هُ",
"ز َ",
"دا ً",
"م س",
"ك ث",
"الْ عَ",
"ج ِ",
"ص ْ",
"ف َا",
"ل ه",
"و ي",
"ع َا",
"هُ وَ",
"ب ِي",
"ب َا",
"أ س",
"ث َ",
"ل ِي",
"ر ض",
"الر َّ",
"لِ كَ",
"ت َّ",
"ف ُ",
"ق ة",
"ف عل",
"مِ ن",
"ال آ",
"ث ُ",
"س م",
"م َّا",
"بِ هِ",
"ت ق",
"خ ر",
"ل قد",
"خ ل",
"ش ر",
"أن ت",
"ل َّا",
"س ن",
"الس َّ",
"الذ ي",
"س َا",
"و ما",
"ز ل",
"و ب",
"أ ْ",
"إ ذا",
"ر ِي",
"ح ة",
"ن ِي",
"الْ حَ",
"وَ قَالَ",
"ب ه",
"ة ٍ",
"س أ",
"ر ٌ",
"ب ال",
"م ة",
"ش ْ",
"و ت",
"عن د",
"ف س",
"بَ عْ",
"ه ر",
"ق ط",
"أ ح",
"إن ه",
"و ع",
"ف ت",
"غ ا",
"هنا ك",
"ب ت",
"مِ نَ",
"س ر",
"ذَ لِكَ",
"ر س",
"حد ث",
"غ ْ",
"ّ ِي",
"ال إ",
"وَ يَ",
"ج ل",
"ا ست",
"ق ِي",
"ع ب",
"و س",
"ي ش",
"الَّذ ِينَ",
"تا ب",
"د ِي",
"ج ب",
"ك ون",
"ب ن",
"ال ث",
"لَ يْ",
"ب عد",
"وَ الْ",
"فَ أَ",
"ع م",
"هُ م",
"ت ن",
"ذ ْ",
"أ ص",
"أ ين",
"رَب ِّ",
"الذ ين",
"إِ ن",
"ب ين",
"ج ُ",
"عَلَيْ هِ",
"ح َا",
"ل و",
"ست ط",
"ظ ر",
"لَ مْ",
"ء ِ",
"كُ ل",
"ط ل",
"ت َا",
"ض ُ",
"كن ت",
"ل ًا",
"م ٌ",
"ق بل",
"ـ ـ",
"ذ ِ",
"قَ وْ",
"ص ِ",
"م ًا",
"كان ت",
"ص ا",
"ي ق",
"ال ف",
"ال نا",
"م ٍ",
"إِ نْ",
"ال نَّ",
"ج د",
"وَ مَا",
"ت ت",
"ب ح",
"م كان",
"كي ف",
"ّ ة",
"ال ا",
"ج َا",
"أ و",
"سا عد",
"ض ِ",
"إ لا",
"را ً",
"ق َا",
"ر أ",
"ع ت",
"أ حد",
"ه د",
"ض ا",
"ط ر",
"أ ق",
"ما ء",
"د َّ",
"ال با",
"م ُو",
"أَ وْ",
"ط ا",
"ق ُو",
"خ ِ",
"ت ل",
"ستط يع",
"د َا",
"الن َّا",
"إ لَى",
"وَ تَ",
"هَ ذَا",
"ب ة",
"علي ك",
"ج ر",
"ال من",
"ز ا",
"ر ٍ",
"د ع",
"ّ ًا",
"س ة",
"ثُ مَّ",
"شي ء",
"ال غ",
"ت ح",
"ر ُونَ",
"ال يوم",
"م ِي",
"ن ُوا",
"أ ر",
"تُ مْ",
"ع ر",
"ي ف",
"أ ب",
"د ًا",
"ص َا",
"الت َّ",
"أ ريد",
"ال ز",
"يَ وْ",
"إ لي",
"ج ي",
"يَ عْ",
"فض ل",
"ال إن",
"أن ه",
"n g",
"i 4",
"a n",
"s h",
"z h",
"i 2",
"ng 1",
"u 4",
"i 1",
"ng 2",
"d e",
"j i",
"a o",
"x i",
"u 3",
"de 5",
"e 4",
"i 3",
"ng 4",
"an 4",
"e n",
"u o",
"sh i4",
"an 2",
"u 2",
"c h",
"u 1",
"ng 3",
"a 1",
"an 1",
"e 2",
"a 4",
"e i4",
"o ng1",
"a i4",
"ao 4",
"h u",
"a ng1",
"l i",
"y o",
"an 3",
"w ei4",
"uo 2",
"n 1",
"en 2",
"ao 3",
"e 1",
"y u",
"q i",
"e ng2",
"zh o",
"a ng3",
"a ng4",
"a ng2",
"uo 4",
"m i",
"g e4",
"y i1",
"g uo2",
"e r",
"b i",
"a 3",
"h e2",
"e 3",
"y i2",
"d i4",
"zh ong1",
"b u4",
"g u",
"a i2",
"n 2",
"z ai4",
"sh i2",
"e ng1",
"r en2",
"o ng2",
"xi an4",
"y i",
"x u",
"n 4",
"l i4",
"en 4",
"y u2",
"e i2",
"yi2 ge4",
"o u4",
"e i3",
"d i",
"u i4",
"a 2",
"yo u3",
"ao 1",
"d a4",
"ch eng2",
"en 1",
"e ng4",
"y i4",
"s i1",
"zh i4",
"ji a1",
"yu an2",
"n i",
"t a1",
"de5 yi2ge4",
"k e1",
"sh u3",
"x i1",
"j i2",
"ao 2",
"t i",
"o u3",
"o ng4",
"xi a4",
"a i1",
"g ong1",
"zh i1",
"en 3",
"w ei2",
"j u",
"xu e2",
"q u1",
"zho u1",
"er 3",
"mi ng2",
"zho ng3",
"l i3",
"w u4",
"y i3",
"uo 1",
"e 5",
"j i4",
"xi ng2",
"ji an4",
"hu a4",
"y u3",
"uo 3",
"j i1",
"a i3",
"z uo4",
"h ou4",
"hu i4",
"e i1",
"ni an2",
"q i2",
"p i",
"d ao4",
"sh eng1",
"de 2",
"d ai4",
"u an2",
"zh e4",
"zh eng4",
"b en3",
"sh ang4",
"zh u3",
"b ei4",
"y e4",
"ch u1",
"zh an4",
"l e5",
"l ai2",
"sh i3",
"n an2",
"r en4",
"yo u2",
"k e4",
"b a1",
"f u4",
"d ui4",
"y a4",
"m ei3",
"z i4",
"xi n1",
"ji ng1",
"zh u",
"n 3",
"yo ng4",
"m u4",
"ji ao4",
"y e3",
"ji n4",
"bi an4",
"l u4",
"q i1",
"sh e4",
"xi ang1",
"o ng3",
"sh u4",
"d ong4",
"s uo3",
"gu an1",
"s an1",
"b o",
"t e4",
"d uo1",
"f u2",
"mi n2",
"l a1",
"zh i2",
"zh en4",
"o u1",
"w u3",
"m a3",
"i 5",
"z i5",
"j u4",
"er 4",
"y ao4",
"xia4 de5yi2ge4",
"s i4",
"t u2",
"sh an1",
"z ui4",
"ch u",
"yi n1",
"er 2",
"t ong2",
"d ong1",
"y u4",
"y an2",
"qi an2",
"shu3 xia4de5yi2ge4",
"ju n1",
"k e3",
"w en2",
"f a3",
"l uo2",
"zh u4",
"x i4",
"k ou3",
"b ei3",
"ji an1",
"f a1",
"di an4",
"ji ang1",
"wei4 yu2",
"xi ang4",
"zh i3",
"e ng3",
"f ang1",
"l an2",
"sh u",
"r i4",
"li an2",
"sh ou3",
"m o",
"qi u2",
"ji n1",
"h uo4",
"shu3xia4de5yi2ge4 zhong3",
"f en1",
"n ei4",
"g ai1",
"mei3 guo2",
"u n2",
"g e2",
"b ao3",
"qi ng1",
"g ao1",
"t ai2",
"d u",
"xi ao3",
"ji e2",
"ti an1",
"ch ang2",
"q uan2",
"li e4",
"h ai3",
"f ei1",
"t i3",
"ju e2",
"o u2",
"c i3",
"z u2",
"n i2",
"bi ao3",
"zhong1 guo2",
"d u4",
"yu e4",
"xi ng4",
"sh eng4",
"ch e1",
"d an1",
"ji e1",
"li n2",
"pi ng2",
"f u3",
"g u3",
"ji e4",
"w o",
"v 3",
"sh eng3",
"n a4",
"yu an4",
"zh ang3",
"gu an3",
"d ao3",
"z u3",
"di ng4",
"di an3",
"c eng2",
"ren2 kou3",
"t ai4",
"t ong1",
"g uo4",
"n eng2",
"ch ang3",
"hu a2",
"li u2",
"yi ng1",
"xi ao4",
"c i4",
"bian4 hua4",
"li ang3",
"g ong4",
"zho ng4",
"de5 yi1",
"s e4",
"k ai1",
"w ang2",
"ji u4",
"sh i1",
"sh ou4",
"m ei2",
"k u",
"s u",
"f eng1",
"z e2",
"tu2 shi4",
"t i2",
"q i4",
"ji u3",
"sh en1",
"zh e3",
"ren2kou3 bian4hua4",
"ren2kou3bian4hua4 tu2shi4",
"di4 qu1",
"y ang2",
"m en",
"men 5",
"l ong2",
"bi ng4",
"ch an3",
"zh u1",
"w ei3",
"w ai4",
"xi ng1",
"bo 1",
"b i3",
"t ang2",
"hu a1",
"bo 2",
"shu i3",
"sh u1",
"d ou1",
"s ai4",
"ch ao2",
"b i4",
"li ng2",
"l ei4",
"da4 xue2",
"f en4",
"shu3 de5",
"m u3",
"ji ao1",
"d ang1",
"ch eng1",
"t ong3",
"n v3",
"q i3",
"y an3",
"mi an4",
"l uo4",
"ji ng4",
"g e1",
"r u4",
"d an4",
"ri4 ben3",
"p u3",
"yu n4",
"hu ang2",
"wo 3",
"l v",
"h ai2",
"shi4 yi1",
"xi e1",
"yi ng3",
"w u2",
"sh en2",
"w ang3",
"gu ang3",
"li u4",
"s u4",
"shi4 zhen4",
"c an1",
"c ao3",
"xi a2",
"k a3",
"d a2",
"h u4",
"b an4",
"d ang3",
"h u2",
"z ong3",
"de ng3",
"de5yi2ge4 shi4zhen4",
"ch uan2",
"mo 4",
"zh ang1",
"b an1",
"mo 2",
"ch a2",
"c e4",
"zhu3 yao4",
"t ou2",
"j u2",
"shi4 wei4yu2",
"s a4",
"u n1",
"ke3 yi3",
"d u1",
"h an4",
"li ang4",
"sh a1",
"ji a3",
"z i1",
"lv 4",
"f u1",
"xi an1",
"x u4",
"gu ang1",
"m eng2",
"b ao4",
"yo u4",
"r ong2",
"zhi1 yi1",
"w ei1",
"m ao2",
"guo2 jia1",
"c ong2",
"g ou4",
"ti e3",
"zh en1",
"d u2",
"bi an1",
"c i2",
"q u3",
"f an4",
"xi ang3",
"m en2",
"j u1",
"h ong2",
"z i3",
"ta1 men5",
"ji 3",
"z ong1",
"zhou1 de5yi2ge4shi4zhen4",
"t uan2",
"ji ng3",
"gong1 si1",
"xi e4",
"l i2",
"li4 shi3",
"b ao1",
"g ang3",
"gu i1",
"zh eng1",
"zhi2 wu4",
"ta1 de5",
"pi n3",
"zhu an1",
"ch ong2",
"shi3 yong4",
"w a3",
"sh uo1",
"chu an1",
"l ei2",
"w an1",
"h uo2",
"q u",
"s u1",
"z ao3",
"g ai3",
"q u4",
"g u4",
"l u",
"x i2",
"h ang2",
"yi ng4",
"c un1",
"g en1",
"yi ng2",
"ti ng2",
"cheng2 shi4",
"ji ang3",
"li ng3",
"l un2",
"bu4 fen4",
"de ng1",
"xu an3",
"dong4 wu4",
"de2 guo2",
"xi an3",
"f an3",
"zh e5",
"h an2",
"h ao4",
"m i4",
"r an2",
"qi n1",
"ti ao2",
"zh an3",
"h i",
"k a",
"n o",
"t e",
"s u",
"s hi",
"t a",
"t o",
"n a",
"w a",
"o u",
"r u",
"n i",
"k u",
"k i",
"g a",
"d e",
"k o",
"m a",
"r e",
"r a",
"m o",
"t su",
"w o",
"e n",
"r i",
"s a",
"d a",
"s e",
"j i",
"h a",
"c hi",
"k e",
"te ki",
"m i",
"y ou",
"s h",
"s o",
"y o",
"y a",
"na i",
"t te",
"a ru",
"b a",
"u u",
"t ta",
"ka i",
"ka n",
"shi te",
"m e",
"d o",
"mo no",
"se i",
"r o",
"ko to",
"ka ra",
"shi ta",
"b u",
"m u",
"c h",
"su ru",
"k ou",
"g o",
"ma su",
"ta i",
"f u",
"k en",
"i u",
"g en",
"wa re",
"shi n",
"z u",
"a i",
"o n",
"o ku",
"g i",
"d ou",
"n e",
"y uu",
"i ru",
"i te",
"ji ko",
"de su",
"j u",
"ra re",
"sh u",
"b e",
"sh ou",
"s ha",
"se kai",
"s ou",
"k you",
"ma shita",
"s en",
"na ra",
"sa n",
"ke i",
"i ta",
"a ri",
"i tsu",
"ko no",
"j ou",
"na ka",
"ch ou",
"so re",
"g u",
"na ru",
"ga ku",
"re ba",
"g e",
"h o",
"i n",
"hi to",
"sa i",
"na n",
"da i",
"tsu ku",
"shi ki",
"sa re",
"na ku",
"p p",
"bu n",
"ju n",
"so no",
"ka ku",
"z ai",
"b i",
"to u",
"wa ta",
"sh uu",
"i i",
"te i",
"ka re",
"y u",
"shi i",
"ma de",
"sh o",
"a n",
"ke reba",
"shi ka",
"i chi",
"ha n",
"de ki",
"ni n",
"ware ware",
"na kereba",
"o ite",
"h ou",
"ya ku",
"ra i",
"mu jun",
"l e",
"yo ku",
"bu tsu",
"o o",
"ko n",
"o mo",
"ga e",
"nara nai",
"ta chi",
"z en",
"ch uu",
"kan gae",
"ta ra",
"to ki",
"ko ro",
"mujun teki",
"z e",
"na ga",
"ji n",
"shi ma",
"te n",
"i ki",
"i ku",
"no u",
"i masu",
"r ou",
"h on",
"ka e",
"t to",
"ko re",
"ta n",
"ki ta",
"i s",
"da tta",
"ji tsu",
"ma e",
"i e",
"me i",
"da n",
"h e",
"to ku",
"dou itsu",
"ri tsu",
"k yuu",
"h you",
"rare ta",
"kei sei",
"k kan",
"rare ru",
"m ou",
"do ko",
"r you",
"da ke",
"naka tta",
"so ko",
"ta be",
"e r",
"ha na",
"c o",
"fu ku",
"p a",
"so n",
"ya su",
"ch o",
"wata ku",
"ya ma",
"z a",
"k yo",
"gen zai",
"b oku",
"a ta",
"j a",
"ka wa",
"ma sen",
"j uu",
"ro n",
"b o",
"na tte",
"wataku shi",
"yo tte",
"ma i",
"g ou",
"ha i",
"mo n",
"ba n",
"ji shin",
"c a",
"re te",
"n en",
"o ka",
"ka gaku",
"na tta",
"p o",
"ka ru",
"na ri",
"m en",
"ma ta",
"e i",
"ku ru",
"ga i",
"ka ri",
"sha kai",
"kou i",
"yo ri",
"se tsu",
"j o",
"re ru",
"to koro",
"ju tsu",
"i on",
"sa ku",
"tta i",
"c ha",
"nin gen",
"n u",
"c e",
"ta me",
"kan kyou",
"de n",
"o oku",
"i ma",
"wata shi",
"tsuku ru",
"su gi",
"b en",
"ji bun",
"shi tsu",
"ke ru",
"ki n",
"ki shi",
"shika shi",
"mo to",
"ma ri",
"i tte",
"de shita",
"n de",
"ari masu",
"te r",
"z ou",
"ko e",
"ze ttai",
"kkan teki",
"h en",
"re kishi",
"deki ru",
"tsu ka",
"l a",
"i tta",
"o i",
"ko butsu",
"mi ru",
"sh oku",
"shi masu",
"gi jutsu",
"g you",
"jou shiki",
"a tta",
"ho do",
"ko ko",
"tsuku rareta",
"z oku",
"hi tei",
"ko ku",
"rekishi teki",
"ke te",
"o ri",
"i mi",
"ka ko",
"naga ra",
"ka karu",
"shu tai",
"ha ji",
"ma n",
"ta ku",
"ra n",
"douitsu teki",
"z o",
"me te",
"re i",
"tsu u",
"sare te",
"gen jitsu",
"p e",
"s t",
"ba i",
"na wa",
"ji kan",
"wa ru",
"r t",
"a tsu",
"so ku",
"koui teki",
"a ra",
"u ma",
"a no",
"i de",
"ka ta",
"te tsu",
"ga wa",
"ke do",
"re ta",
"mi n",
"sa you",
"tte ru",
"to ri",
"p u",
"ki mi",
"b ou",
"mu ra",
"sare ru",
"ma chi",
"k ya",
"o sa",
"kon na",
"a ku",
"a l",
"sare ta",
"i pp",
"shi ku",
"u chi",
"hito tsu",
"ha tara",
"tachi ba",
"shi ro",
"ka tachi",
"to mo",
"e te",
"me ru",
"ni chi",
"da re",
"ka tta",
"e ru",
"su ki",
"a ge",
"oo ki",
"ma ru",
"mo ku",
"o ko",
"kangae rareru",
"o to",
"tan ni",
"ta da",
"tai teki",
"mo tte",
"ki nou",
"shi nai",
"k ki",
"u e",
"ta ri",
"l i",
"ra nai",
"k kou",
"mi rai",
"pp on",
"go to",
"hi n",
"hi tsu",
"te ru",
"mo chi",
"ka tsu",
"re n",
"n yuu",
"su i",
"zu ka",
"tsu ite",
"no mi",
"su gu",
"ku da",
"tetsu gaku",
"i ka",
"ron ri",
"o ki",
"ni ppon",
"p er",
"shi mashita",
"chi shiki",
"cho kkanteki",
"su ko",
"t ion",
"ku u",
"a na",
"a rou",
"ka tte",
"ku ri",
"i nai",
"hyou gen",
"i shiki",
"do ku",
"a tte",
"a tara",
"to n",
"wa ri",
"ka o",
"sei san",
"hana shi",
"s i",
"ka ke",
"na ji",
"su nawa",
"sunawa chi",
"u go",
"su u",
"ba ra",
"le v",
"hi ro",
"i wa",
"be tsu",
"yo i",
"se ru",
"shite ru",
"rare te",
"to shi",
"se ki",
"tai ritsu",
"wa kara",
"to kyo",
"k ka",
"k yoku",
"u n",
"i ro",
"mi te",
"sa ki",
"kan ji",
"mi ta",
"su be",
"r yoku",
"ma tta",
"kuda sai",
"omo i",
"ta no",
"ware ru",
"co m",
"hitsu you",
"ka shi",
"re nai",
"kan kei",
"a to",
"ga tte",
"o chi",
"mo tsu",
"in g",
"son zai",
"l l",
"o re",
"tai shite",
"a me",
"sei mei",
"ka no",
"gi ri",
"kangae ru",
"yu e",
"a sa",
"o naji",
"yo ru",
"ni ku",
"osa ka",
"suko shi",
"c k",
"ta ma",
"kano jo",
"ki te",
"mon dai",
"a mari",
"e ki",
"ko jin",
"ha ya",
"i t",
"de te",
"atara shii",
"a wa",
"ga kkou",
"tsu zu",
"shu kan",
"i mashita",
"mi na",
"ata e",
"da rou",
"hatara ku",
"ga ta",
"da chi",
"ma tsu",
"ari masen",
"sei butsu",
"mi tsu",
"he ya",
"yasu i",
"d i",
"de ni",
"no ko",
"ha ha",
"do mo",
"ka mi",
"su deni",
"na o",
"ra ku",
"i ke",
"a ki",
"me ta",
"l o",
"ko domo",
"so shite",
"ga me",
"ba kari",
"to te",
"ha tsu",
"mi se",
"moku teki",
"da kara"
]
}
} | 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/shortest-path.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Functions to find shortest paths in an FST.
#ifndef FST_SHORTEST_PATH_H_
#define FST_SHORTEST_PATH_H_
#include <functional>
#include <type_traits>
#include <utility>
#include <vector>
#include <fst/log.h>
#include <fst/cache.h>
#include <fst/determinize.h>
#include <fst/queue.h>
#include <fst/shortest-distance.h>
#include <fst/test-properties.h>
namespace fst {
// Options common to the single- and n-shortest-paths algorithms. The queue
// discipline, arc filter, source state, and convergence delta are inherited
// from ShortestDistanceOptions.
template <class Arc, class Queue, class ArcFilter>
struct ShortestPathOptions
    : public ShortestDistanceOptions<Arc, Queue, ArcFilter> {
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  int32_t nshortest;  // Returns n-shortest paths.
  bool unique;        // Only returns paths with distinct input strings.
  bool has_distance;  // Distance vector already contains the
                      // shortest distance from the initial state.
  bool first_path;    // Single shortest path stops after finding the first
                      // path to a final state; that path is the shortest path
                      // only when:
                      // (1) using the ShortestFirstQueue with all the weights
                      // in the FST being between One() and Zero() according to
                      // NaturalLess or when
                      // (2) using the NaturalAStarQueue with an admissible
                      // and consistent estimate.
  Weight weight_threshold;  // Pruning weight threshold.
  StateId state_threshold;  // Pruning state threshold (kNoStateId: no limit).

  // All arguments after `filter` are optional and default to the
  // single-shortest-path configuration with no pruning.
  ShortestPathOptions(Queue *queue, ArcFilter filter, int32_t nshortest = 1,
                      bool unique = false, bool has_distance = false,
                      float delta = kShortestDelta, bool first_path = false,
                      Weight weight_threshold = Weight::Zero(),
                      StateId state_threshold = kNoStateId)
      : ShortestDistanceOptions<Arc, Queue, ArcFilter>(queue, filter,
                                                       kNoStateId, delta),
        nshortest(nshortest),
        unique(unique),
        has_distance(has_distance),
        first_path(first_path),
        weight_threshold(std::move(weight_threshold)),
        state_threshold(state_threshold) {}
};
namespace internal {
constexpr size_t kNoArc = -1;
// Helper function for SingleShortestPath building the shortest path as a left-
// to-right machine backwards from the best final state. It takes the input
// FST passed to SingleShortestPath and the parent vector and f_parent returned
// by that function, and builds the result into the provided output mutable FST.
// This is not normally called by users; see ShortestPath instead.
template <class Arc>
void SingleShortestPathBacktrace(
    const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
    const std::vector<std::pair<typename Arc::StateId, size_t>> &parent,
    typename Arc::StateId f_parent) {
  using StateId = typename Arc::StateId;
  ofst->DeleteStates();
  ofst->SetInputSymbols(ifst.InputSymbols());
  ofst->SetOutputSymbols(ifst.OutputSymbols());
  StateId s_p = kNoStateId;  // Output state created on the current iteration.
  StateId d_p = kNoStateId;  // Output state created on the previous iteration.
  // Walks the parent links backwards from the best final state toward the
  // start state, creating one output state per input state on the path.
  // `state` is the current input state; `d` is the input state visited on the
  // previous iteration (kNoStateId on the first one).
  for (StateId state = f_parent, d = kNoStateId; state != kNoStateId;
       d = state, state = parent[state].first) {
    d_p = s_p;
    s_p = ofst->AddState();
    if (d == kNoStateId) {
      // First iteration: the new output state mirrors the best final state.
      ofst->SetFinal(s_p, ifst.Final(f_parent));
    } else {
      // Copies the arc recorded in parent[d] and redirects it to the output
      // state built for its original destination.
      ArcIterator<Fst<Arc>> aiter(ifst, state);
      aiter.Seek(parent[d].second);
      auto arc = aiter.Value();
      arc.nextstate = d_p;
      ofst->AddArc(s_p, arc);
    }
  }
  // The last state created corresponds to the input start state.
  ofst->SetStart(s_p);
  if (ifst.Properties(kError, false)) ofst->SetProperties(kError, kError);
  ofst->SetProperties(
      ShortestPathProperties(ofst->Properties(kFstProperties, false), true),
      kFstProperties);
}
// Helper function for SingleShortestPath building a tree of shortest paths to
// every final state in the input FST. It takes the input FST and parent values
// computed by SingleShortestPath and builds into the output mutable FST the
// subtree of ifst that consists only of the best paths to all final states.
// This is not normally called by users; see ShortestPath instead.
template <class Arc>
void SingleShortestTree(
    const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
    const std::vector<std::pair<typename Arc::StateId, size_t>> &parent) {
  ofst->DeleteStates();
  ofst->SetInputSymbols(ifst.InputSymbols());
  ofst->SetOutputSymbols(ifst.OutputSymbols());
  ofst->SetStart(ifst.Start());
  // Mirrors every input state (and its final weight) in the output.
  for (StateIterator<Fst<Arc>> siter(ifst); !siter.Done(); siter.Next()) {
    ofst->AddState();
    ofst->SetFinal(siter.Value(), ifst.Final(siter.Value()));
  }
  // Copies into the output only the single best incoming arc recorded for
  // each state in the parent vector; states never reached keep no arcs.
  for (const auto &pair : parent) {
    if (pair.first != kNoStateId && pair.second != kNoArc) {
      ArcIterator<Fst<Arc>> aiter(ifst, pair.first);
      aiter.Seek(pair.second);
      ofst->AddArc(pair.first, aiter.Value());
    }
  }
  if (ifst.Properties(kError, false)) ofst->SetProperties(kError, kError);
  ofst->SetProperties(
      ShortestPathProperties(ofst->Properties(kFstProperties, false), true),
      kFstProperties);
}
// Implements the stopping criterion when ShortestPathOptions::first_path
// is set to true:
// operator()(s, d, f) == true
// iff every successful path through state 's' has a cost greater or equal
// to 'f' under the assumption that 'd' is the shortest distance to state 's'.
// Correct when using the ShortestFirstQueue with all the weights in the FST
// being between One() and Zero() according to NaturalLess.
template <typename S, typename W, typename Queue>
struct FirstPathSelect {
  FirstPathSelect(const Queue &) {}
  // Returns true iff the best final distance f cannot be improved by any
  // path continuing from state s at distance d: f absorbs d under Plus.
  bool operator()(S s, W d, W f) const { return f == Plus(d, f); }
};
// Specialisation for A*.
// Correct when the estimate is admissible and consistent.
template <typename S, typename W, typename Estimate>
class FirstPathSelect<S, W, NaturalAStarQueue<S, W, Estimate>> {
 public:
  using Queue = NaturalAStarQueue<S, W, Estimate>;

  FirstPathSelect(const Queue &state_queue)
      : estimate_(state_queue.GetCompare().GetEstimate()) {}

  // As in the primary template, but compares against the A*-estimated total
  // cost Times(d, estimate_(s)) rather than the bare distance d.
  bool operator()(S s, W d, W f) const {
    return f == Plus(Times(d, estimate_(s)), f);
  }

 private:
  // Heuristic estimate borrowed from the queue's comparator; the queue must
  // outlive this selector.
  const Estimate &estimate_;
};
// Shortest-path algorithm. It builds the output mutable FST so that it contains
// the shortest path in the input FST; distance returns the shortest distances
// from the source state to each state in the input FST, and the options struct
// is used to specify options such as the queue discipline, the arc filter and
// delta. The super_final option is an output parameter indicating the final
// state, and the parent argument is used for the storage of the backtrace path
// for each state 1 to n, (i.e., the best previous state and the arc that
// transition to state n.) The shortest path is the lowest weight path w.r.t.
// the natural semiring order. The weights need to be right distributive and
// have the path (kPath) property. False is returned if an error is encountered.
//
// This is not normally called by users; see ShortestPath instead (with n = 1).
template <class Arc, class Queue, class ArcFilter>
bool SingleShortestPath(
    const Fst<Arc> &ifst, std::vector<typename Arc::Weight> *distance,
    const ShortestPathOptions<Arc, Queue, ArcFilter> &opts,
    typename Arc::StateId *f_parent,
    std::vector<std::pair<typename Arc::StateId, size_t>> *parent) {
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  static_assert(IsPath<Weight>::value, "Weight must have path property.");
  static_assert((Weight::Properties() & kRightSemiring) == kRightSemiring,
                "Weight must be right distributive.");
  parent->clear();
  *f_parent = kNoStateId;
  // An empty input machine trivially succeeds with no path.
  if (ifst.Start() == kNoStateId) return true;
  std::vector<bool> enqueued;
  auto state_queue = opts.state_queue;
  const auto source = (opts.source == kNoStateId) ? ifst.Start() : opts.source;
  bool final_seen = false;
  auto f_distance = Weight::Zero();
  distance->clear();
  state_queue->Clear();
  // Grows the per-state tables up to (and then including) the source state.
  // The static_cast avoids a signed/unsigned comparison between StateId and
  // std::vector::size_type; state IDs are non-negative here.
  while (distance->size() < static_cast<size_t>(source)) {
    distance->push_back(Weight::Zero());
    enqueued.push_back(false);
    parent->push_back(std::make_pair(kNoStateId, kNoArc));
  }
  distance->push_back(Weight::One());
  parent->push_back(std::make_pair(kNoStateId, kNoArc));
  state_queue->Enqueue(source);
  enqueued.push_back(true);
  while (!state_queue->Empty()) {
    const auto s = state_queue->Head();
    state_queue->Dequeue();
    enqueued[s] = false;
    const auto sd = (*distance)[s];
    // If we are using a shortest queue, no other path is going to be shorter
    // than f_distance at this point.
    using FirstPath = FirstPathSelect<StateId, Weight, Queue>;
    if (opts.first_path && final_seen &&
        FirstPath(*state_queue)(s, sd, f_distance)) {
      break;
    }
    if (ifst.Final(s) != Weight::Zero()) {
      // Relaxes the best final distance and remembers the best final state.
      const auto plus = Plus(f_distance, Times(sd, ifst.Final(s)));
      if (f_distance != plus) {
        f_distance = plus;
        *f_parent = s;
      }
      if (!f_distance.Member()) return false;
      final_seen = true;
    }
    for (ArcIterator<Fst<Arc>> aiter(ifst, s); !aiter.Done(); aiter.Next()) {
      const auto &arc = aiter.Value();
      // Lazily grows the tables to cover the destination state; cast as above.
      while (distance->size() <= static_cast<size_t>(arc.nextstate)) {
        distance->push_back(Weight::Zero());
        enqueued.push_back(false);
        parent->push_back(std::make_pair(kNoStateId, kNoArc));
      }
      auto &nd = (*distance)[arc.nextstate];
      const auto weight = Times(sd, arc.weight);
      if (nd != Plus(nd, weight)) {
        // Found a shorter path to arc.nextstate; records the backpointer and
        // (re)schedules the state.
        nd = Plus(nd, weight);
        if (!nd.Member()) return false;
        (*parent)[arc.nextstate] = std::make_pair(s, aiter.Position());
        if (!enqueued[arc.nextstate]) {
          state_queue->Enqueue(arc.nextstate);
          enqueued[arc.nextstate] = true;
        } else {
          state_queue->Update(arc.nextstate);
        }
      }
    }
  }
  return true;
}
// Heap comparator for the n-shortest-paths algorithm. Each heap entry is a
// state of the output FST; its priority is the total weight
// Times(PWeight(s), w) of the candidate path (s, w) it represents. The
// comparison is reversed (less_(wy, wx)) so the lightest candidate w.r.t.
// NaturalLess surfaces at the heap top.
template <class StateId, class Weight>
class ShortestPathCompare {
 public:
  ShortestPathCompare(const std::vector<std::pair<StateId, Weight>> &pairs,
                      const std::vector<Weight> &distance, StateId superfinal,
                      float delta)
      : pairs_(pairs),
        distance_(distance),
        superfinal_(superfinal),
        delta_(delta) {}

  bool operator()(const StateId x, const StateId y) const {
    const auto &px = pairs_[x];
    const auto &py = pairs_[y];
    const auto wx = Times(PWeight(px.first), px.second);
    const auto wy = Times(PWeight(py.first), py.second);
    // Penalize complete paths to ensure correct results with inexact weights.
    // This forms a strict weak order so long as ApproxEqual(a, b) =>
    // ApproxEqual(a, c) for all c s.t. less_(a, c) && less_(c, b).
    if (px.first == superfinal_ && py.first != superfinal_) {
      return less_(wy, wx) || ApproxEqual(wx, wy, delta_);
    } else if (py.first == superfinal_ && px.first != superfinal_) {
      return less_(wy, wx) && !ApproxEqual(wx, wy, delta_);
    } else {
      return less_(wy, wx);
    }
  }

 private:
  // Shortest distance from `state` to a final state: One() for the superfinal
  // state, Zero() for states with no computed distance.
  Weight PWeight(StateId state) const {
    return (state == superfinal_)
               ? Weight::One()
               : (state < distance_.size()) ? distance_[state] : Weight::Zero();
  }

  const std::vector<std::pair<StateId, Weight>> &pairs_;
  const std::vector<Weight> &distance_;
  const StateId superfinal_;
  const float delta_;
  NaturalLess<Weight> less_;
};
// N-Shortest-path algorithm: implements the core n-shortest path algorithm.
// The output is built reversed. See below for versions with more options and
// *not reversed*.
//
// The output mutable FST contains the REVERSE of n'shortest paths in the input
// FST; distance must contain the shortest distance from each state to a final
// state in the input FST; delta is the convergence delta.
//
// The n-shortest paths are the n-lowest weight paths w.r.t. the natural
// semiring order. The single path that can be read from the ith of at most n
// transitions leaving the initial state of the input FST is the ith shortest
// path. Disregarding the initial state and initial transitions, the
// n-shortest paths, in fact, form a tree rooted at the single final state.
//
// The weights need to be left and right distributive (kSemiring) and have the
// path (kPath) property.
//
// Arc weights must satisfy the property that the sum of the weights of one or
// more paths from some state S to T is never Zero(). In particular, arc weights
// are never Zero().
//
// For more information, see:
//
// Mohri, M, and Riley, M. 2002. An efficient algorithm for the n-best-strings
// problem. In Proc. ICSLP.
//
// The algorithm relies on the shortest-distance algorithm. There are some
// issues with the pseudo-code as written in the paper (viz., line 11).
//
// IMPLEMENTATION NOTE: The input FST can be a delayed FST and at any state in
// its expansion the values of distance vector need only be defined at that time
// for the states that are known to exist.
template <class Arc, class RevArc>
void NShortestPath(const Fst<RevArc> &ifst, MutableFst<Arc> *ofst,
                   const std::vector<typename Arc::Weight> &distance,
                   int32_t nshortest, float delta = kShortestDelta,
                   typename Arc::Weight weight_threshold = Arc::Weight::Zero(),
                   typename Arc::StateId state_threshold = kNoStateId) {
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using Pair = std::pair<StateId, Weight>;
  static_assert((Weight::Properties() & kPath) == kPath,
                "Weight must have path property.");
  static_assert((Weight::Properties() & kSemiring) == kSemiring,
                "Weight must be distributive.");
  if (nshortest <= 0) return;
  ofst->DeleteStates();
  ofst->SetInputSymbols(ifst.InputSymbols());
  ofst->SetOutputSymbols(ifst.OutputSymbols());
  // Each state in ofst corresponds to a path with weight w from the initial
  // state of ifst to a state s in ifst, that can be characterized by a pair
  // (s, w). The vector pairs maps each state in ofst to the corresponding
  // pair (s, w).
  std::vector<Pair> pairs;
  // The superfinal state is denoted by kNoStateId. The distance from the
  // superfinal state to the final state is semiring One, so
  // `distance[kNoStateId]` is not needed.
  const ShortestPathCompare<StateId, Weight> compare(pairs, distance,
                                                     kNoStateId, delta);
  const NaturalLess<Weight> less;
  // Bails out (propagating an input error if any) when the input is empty,
  // no distance was computed for its start state, or the thresholds preclude
  // producing any path at all.
  if (ifst.Start() == kNoStateId || distance.size() <= ifst.Start() ||
      distance[ifst.Start()] == Weight::Zero() ||
      less(weight_threshold, Weight::One()) || state_threshold == 0) {
    if (ifst.Properties(kError, false)) ofst->SetProperties(kError, kError);
    return;
  }
  ofst->SetStart(ofst->AddState());
  const auto final_state = ofst->AddState();
  ofst->SetFinal(final_state, Weight::One());
  while (pairs.size() <= final_state) {
    pairs.push_back(std::make_pair(kNoStateId, Weight::Zero()));
  }
  // Seeds the search with the empty path starting at ifst's initial state.
  pairs[final_state] = std::make_pair(ifst.Start(), Weight::One());
  // Candidate paths (as ofst states) are kept in a heap ordered by `compare`.
  std::vector<StateId> heap;
  heap.push_back(final_state);
  // Any candidate whose completed weight exceeds this limit is pruned.
  const auto limit = Times(distance[ifst.Start()], weight_threshold);
  // r[s + 1], s state in fst, is the number of states in ofst which
  // corresponding pair contains s, i.e., it is number of paths computed so far
  // to s. Valid for s == kNoStateId (the superfinal state).
  std::vector<int> r;
  while (!heap.empty()) {
    std::pop_heap(heap.begin(), heap.end(), compare);
    const auto state = heap.back();
    const auto p = pairs[state];
    heap.pop_back();
    // Shortest distance from p.first to a final state (One for superfinal,
    // Zero when no distance is known).
    const auto d =
        (p.first == kNoStateId)
            ? Weight::One()
            : (p.first < distance.size()) ? distance[p.first] : Weight::Zero();
    // Prunes against the weight threshold and the output-size threshold.
    if (less(limit, Times(d, p.second)) ||
        (state_threshold != kNoStateId &&
         ofst->NumStates() >= state_threshold)) {
      continue;
    }
    while (r.size() <= p.first + 1) r.push_back(0);
    ++r[p.first + 1];
    // A candidate that reached the superfinal state is a complete path;
    // connect it to the output start state.
    if (p.first == kNoStateId) {
      ofst->AddArc(ofst->Start(), Arc(0, 0, Weight::One(), state));
    }
    if ((p.first == kNoStateId) && (r[p.first + 1] == nshortest)) break;
    if (r[p.first + 1] > nshortest) continue;
    if (p.first == kNoStateId) continue;
    // Extends the candidate path with each (un-reversed) arc leaving p.first.
    for (ArcIterator<Fst<RevArc>> aiter(ifst, p.first); !aiter.Done();
         aiter.Next()) {
      const auto &rarc = aiter.Value();
      Arc arc(rarc.ilabel, rarc.olabel, rarc.weight.Reverse(), rarc.nextstate);
      const auto weight = Times(p.second, arc.weight);
      const auto next = ofst->AddState();
      pairs.push_back(std::make_pair(arc.nextstate, weight));
      arc.nextstate = state;
      ofst->AddArc(next, arc);
      heap.push_back(next);
      std::push_heap(heap.begin(), heap.end(), compare);
    }
    // If p.first is final, also enqueues the completed path through the
    // superfinal state.
    const auto final_weight = ifst.Final(p.first).Reverse();
    if (final_weight != Weight::Zero()) {
      const auto weight = Times(p.second, final_weight);
      const auto next = ofst->AddState();
      pairs.push_back(std::make_pair(kNoStateId, weight));
      ofst->AddArc(next, Arc(0, 0, final_weight, state));
      heap.push_back(next);
      std::push_heap(heap.begin(), heap.end(), compare);
    }
  }
  // Removes states not on any retained path.
  Connect(ofst);
  if (ifst.Properties(kError, false)) ofst->SetProperties(kError, kError);
  ofst->SetProperties(
      ShortestPathProperties(ofst->Properties(kFstProperties, false)),
      kFstProperties);
}
} // namespace internal
// N-Shortest-path algorithm: this version allows finer control via the options
// argument. See below for a simpler interface. The output mutable FST contains
// the n-shortest paths in the input FST; the distance argument is used to
// return the shortest distances from the source state to each state in the
// input FST, and the options struct is used to specify the number of paths to
// return, whether they need to have distinct input strings, the queue
// discipline, the arc filter and the convergence delta.
//
// The n-shortest paths are the n-lowest weight paths w.r.t. the natural
// semiring order. The single path that can be read from the ith of at most n
// transitions leaving the initial state of the output FST is the ith shortest
// path.
// Disregarding the initial state and initial transitions, the n-shortest paths,
// in fact, form a tree rooted at the single final state.
//
// The weights need to be right distributive and have the path (kPath) property.
// They need to be left distributive as well for nshortest > 1.
//
// For more information, see:
//
// Mohri, M, and Riley, M. 2002. An efficient algorithm for the n-best-strings
// problem. In Proc. ICSLP.
//
// The algorithm relies on the shortest-distance algorithm. There are some
// issues with the pseudo-code as written in the paper (viz., line 11).
template <class Arc, class Queue, class ArcFilter,
          typename std::enable_if<IsPath<typename Arc::Weight>::value>::type * =
              nullptr>
void ShortestPath(const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
                  std::vector<typename Arc::Weight> *distance,
                  const ShortestPathOptions<Arc, Queue, ArcFilter> &opts) {
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using RevArc = ReverseArc<Arc>;
  if (opts.nshortest == 1) {
    // Single-path case: run the dedicated single-source search and backtrace
    // the best path into ofst.
    std::vector<std::pair<StateId, size_t>> parent;
    StateId f_parent;
    if (internal::SingleShortestPath(ifst, distance, opts, &f_parent,
                                     &parent)) {
      internal::SingleShortestPathBacktrace(ifst, ofst, parent, f_parent);
    } else {
      ofst->SetProperties(kError, kError);
    }
    return;
  }
  if (opts.nshortest <= 0) return;
  // n > 1: the distances to a final state are needed below; compute them
  // unless the caller already supplied them.
  if (!opts.has_distance) {
    ShortestDistance(ifst, distance, opts);
    if (distance->size() == 1 && !(*distance)[0].Member()) {
      ofst->SetProperties(kError, kError);
      return;
    }
  }
  // Algorithm works on the reverse of 'fst'; 'distance' is the distance to the
  // final state in 'rfst', 'ofst' is built as the reverse of the tree of
  // n-shortest path in 'rfst'.
  VectorFst<RevArc> rfst;
  Reverse(ifst, &rfst);
  // Folds the initial arcs of the reversed machine with the per-state
  // distances to obtain the total shortest distance, then prepends it so it
  // lines up with rfst's superinitial state (state 0).
  auto d = Weight::Zero();
  for (ArcIterator<VectorFst<RevArc>> aiter(rfst, 0); !aiter.Done();
       aiter.Next()) {
    const auto &arc = aiter.Value();
    const auto state = arc.nextstate - 1;
    if (state < distance->size()) {
      d = Plus(d, Times(arc.weight.Reverse(), (*distance)[state]));
    }
  }
  // TODO(kbg): Avoid this expensive vector operation.
  distance->insert(distance->begin(), d);
  if (!opts.unique) {
    internal::NShortestPath(rfst, ofst, *distance, opts.nshortest, opts.delta,
                            opts.weight_threshold, opts.state_threshold);
  } else {
    // Unique input strings: determinize the reversed machine on-the-fly so
    // each input label sequence is represented by a single path.
    std::vector<Weight> ddistance;
    DeterminizeFstOptions<RevArc> dopts(opts.delta);
    DeterminizeFst<RevArc> dfst(rfst, distance, &ddistance, dopts);
    internal::NShortestPath(dfst, ofst, ddistance, opts.nshortest, opts.delta,
                            opts.weight_threshold, opts.state_threshold);
  }
  // TODO(kbg): Avoid this expensive vector operation.
  distance->erase(distance->begin());
}
// Overload selected when the weight type lacks the path property: shortest
// path is undefined for such semirings, so this reports an error and marks
// the output FST as bad instead of computing anything.
template <class Arc, class Queue, class ArcFilter,
          typename std::enable_if<!IsPath<typename Arc::Weight>::value>::type
              * = nullptr>
void ShortestPath(const Fst<Arc> &, MutableFst<Arc> *ofst,
                  std::vector<typename Arc::Weight> *,
                  const ShortestPathOptions<Arc, Queue, ArcFilter> &) {
  FSTERROR() << "ShortestPath: Weight needs to have the "
             << "path property and be distributive: " << Arc::Weight::Type();
  ofst->SetProperties(kError, kError);
}
// Shortest-path algorithm: simplified interface. See above for a version that
// allows finer control. The output mutable FST contains the n-shortest paths
// in the input FST. The queue discipline is automatically selected. When unique
// is true, only paths with distinct input label sequences are returned.
//
// The n-shortest paths are the n-lowest weight paths w.r.t. the natural
// semiring order. The single path that can be read from the ith of at most n
// transitions leaving the initial state of the output FST is the ith best path.
// The weights need to be right distributive and have the path (kPath) property.
template <class Arc>
void ShortestPath(const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
int32_t nshortest = 1, bool unique = false,
bool first_path = false,
typename Arc::Weight weight_threshold = Arc::Weight::Zero(),
typename Arc::StateId state_threshold = kNoStateId,
float delta = kShortestDelta) {
using StateId = typename Arc::StateId;
std::vector<typename Arc::Weight> distance;
AnyArcFilter<Arc> arc_filter;
AutoQueue<StateId> state_queue(ifst, &distance, arc_filter);
const ShortestPathOptions<Arc, AutoQueue<StateId>, AnyArcFilter<Arc>> opts(
&state_queue, arc_filter, nshortest, unique, false, delta, first_path,
weight_threshold, state_threshold);
ShortestPath(ifst, ofst, &distance, opts);
}
} // namespace fst
#endif // FST_SHORTEST_PATH_H_
| 0 |
coqui_public_repos/inference-engine/third_party/kenlm/lm | coqui_public_repos/inference-engine/third_party/kenlm/lm/interpolate/merge_vocab.hh | #ifndef LM_INTERPOLATE_MERGE_VOCAB_H
#define LM_INTERPOLATE_MERGE_VOCAB_H
#include "lm/word_index.hh"
#include "util/file.hh"
#include "util/fixed_array.hh"
namespace lm {
class EnumerateVocab;
namespace interpolate {
class UniversalVocab;
// Merges the vocabularies of several models into one combined vocabulary.
// The combined vocabulary is reported through `enumerate`.
// `vocab_files` holds one open vocabulary file descriptor per model; this
// function does not take ownership of them.
// Returns the size of the combined vocabulary.
WordIndex MergeVocab(util::FixedArray<int> &vocab_files, UniversalVocab &vocab, EnumerateVocab &enumerate);
}} // namespaces
#endif // LM_INTERPOLATE_MERGE_VOCAB_H
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/compact/compact16_string-fst.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/fst.h>
#include <fst/compact-fst.h>
namespace fst {
// Static registration of CompactStringFst (uint16 compaction) for the
// standard and log arc types with the FST type registry.
static FstRegisterer<CompactStringFst<StdArc, uint16>>
    CompactStringFst_StdArc_uint16_registerer;
static FstRegisterer<CompactStringFst<LogArc, uint16>>
    CompactStringFst_LogArc_uint16_registerer;
} // namespace fst
| 0 |
coqui_public_repos/inference-engine/third_party/kenlm/lm | coqui_public_repos/inference-engine/third_party/kenlm/lm/interpolate/backoff_matrix.hh | #ifndef LM_INTERPOLATE_BACKOFF_MATRIX_H
#define LM_INTERPOLATE_BACKOFF_MATRIX_H
#include <cstddef>
#include <vector>
namespace lm { namespace interpolate {
// Dense (num_models x max_order) table of backoff weights, stored
// row-major with one row per model. Entries are zero-initialized.
class BackoffMatrix {
  public:
    BackoffMatrix(std::size_t num_models, std::size_t max_order)
      : stride_(max_order), values_(num_models * max_order) {}

    // Mutable access to the backoff of `model` at order `order_minus_1 + 1`.
    float &Backoff(std::size_t model, std::size_t order_minus_1) {
      return values_[Index(model, order_minus_1)];
    }

    // Read-only access to the same entry.
    float Backoff(std::size_t model, std::size_t order_minus_1) const {
      return values_[Index(model, order_minus_1)];
    }

  private:
    // Row-major flattening of the (model, order) coordinate.
    std::size_t Index(std::size_t model, std::size_t order_minus_1) const {
      return model * stride_ + order_minus_1;
    }

    const std::size_t stride_;
    std::vector<float> values_;
};
}} // namespaces
#endif // LM_INTERPOLATE_BACKOFF_MATRIX_H
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/lookahead/ilabel_lookahead-fst.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/fst.h>
#include <fst/matcher-fst.h>
namespace fst {
// Static registration of input-label lookahead matcher FSTs with the FST
// type registry, for the standard, log, and 64-bit log arc types.
static FstRegisterer<StdILabelLookAheadFst>
    ILabelLookAheadFst_StdArc_registerer;
static FstRegisterer<MatcherFst<
    ConstFst<LogArc>,
    LabelLookAheadMatcher<SortedMatcher<ConstFst<LogArc>>,
                          ilabel_lookahead_flags, FastLogAccumulator<LogArc>>,
    ilabel_lookahead_fst_type, LabelLookAheadRelabeler<LogArc>>>
    ILabelLookAheadFst_LogArc_registerer;
static FstRegisterer<MatcherFst<
    ConstFst<Log64Arc>,
    LabelLookAheadMatcher<SortedMatcher<ConstFst<Log64Arc>>,
                          ilabel_lookahead_flags, FastLogAccumulator<Log64Arc>>,
    ilabel_lookahead_fst_type, LabelLookAheadRelabeler<Log64Arc>>>
    ILabelLookAheadFst_Log64Arc_registerer;
} // namespace fst
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7 | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/m4/ltoptions.m4 | # Helper functions for option handling. -*- Autoconf -*-
#
# Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
# Inc.
# Written by Gary V. Vaughan, 2004
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
# serial 7 ltoptions.m4
# This is to help aclocal find these macros, as it can't see m4_define.
AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
# ------------------------------------------
m4_define([_LT_MANGLE_OPTION],
[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
# ---------------------------------------
# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
# matching handler defined, dispatch to it. Other OPTION-NAMEs are
# saved as a flag.
m4_define([_LT_SET_OPTION],
[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
_LT_MANGLE_DEFUN([$1], [$2]),
[m4_warning([Unknown $1 option `$2'])])[]dnl
])
# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
# ------------------------------------------------------------
# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
m4_define([_LT_IF_OPTION],
[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
# -------------------------------------------------------
# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
# are set.
m4_define([_LT_UNLESS_OPTIONS],
[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
[m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
[m4_define([$0_found])])])[]dnl
m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
])[]dnl
])
# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
# ----------------------------------------
# OPTION-LIST is a space-separated list of Libtool options associated
# with MACRO-NAME. If any OPTION has a matching handler declared with
# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
# the unknown option and exit.
m4_defun([_LT_SET_OPTIONS],
[# Set options
m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
[_LT_SET_OPTION([$1], _LT_Option)])
m4_if([$1],[LT_INIT],[
dnl
dnl Simply set some default values (i.e off) if boolean options were not
dnl specified:
_LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
])
_LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
])
dnl
dnl If no reference was made to various pairs of opposing options, then
dnl we run the default mode handler for the pair. For example, if neither
dnl `shared' nor `disable-shared' was passed, we enable building of shared
dnl archives by default:
_LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
_LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
_LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
_LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
[_LT_ENABLE_FAST_INSTALL])
])
])# _LT_SET_OPTIONS
## --------------------------------- ##
## Macros to handle LT_INIT options. ##
## --------------------------------- ##
# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
# -----------------------------------------
m4_define([_LT_MANGLE_DEFUN],
[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
# -----------------------------------------------
m4_define([LT_OPTION_DEFINE],
[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
])# LT_OPTION_DEFINE
# dlopen
# ------
LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
])
AU_DEFUN([AC_LIBTOOL_DLOPEN],
[_LT_SET_OPTION([LT_INIT], [dlopen])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `dlopen' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
# win32-dll
# ---------
# Declare package support for building win32 dll's.
LT_OPTION_DEFINE([LT_INIT], [win32-dll],
[enable_win32_dll=yes
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
AC_CHECK_TOOL(AS, as, false)
AC_CHECK_TOOL(DLLTOOL, dlltool, false)
AC_CHECK_TOOL(OBJDUMP, objdump, false)
;;
esac
test -z "$AS" && AS=as
_LT_DECL([], [AS], [1], [Assembler program])dnl
test -z "$DLLTOOL" && DLLTOOL=dlltool
_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
test -z "$OBJDUMP" && OBJDUMP=objdump
_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
])# win32-dll
AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
[AC_REQUIRE([AC_CANONICAL_HOST])dnl
_LT_SET_OPTION([LT_INIT], [win32-dll])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `win32-dll' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
# _LT_ENABLE_SHARED([DEFAULT])
# ----------------------------
# implement the --enable-shared flag, and supports the `shared' and
# `disable-shared' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_SHARED],
[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([shared],
[AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_shared=yes ;;
no) enable_shared=no ;;
*)
enable_shared=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_shared=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
_LT_DECL([build_libtool_libs], [enable_shared], [0],
[Whether or not to build shared libraries])
])# _LT_ENABLE_SHARED
LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
# Old names:
AC_DEFUN([AC_ENABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
])
AC_DEFUN([AC_DISABLE_SHARED],
[_LT_SET_OPTION([LT_INIT], [disable-shared])
])
AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_SHARED], [])
dnl AC_DEFUN([AM_DISABLE_SHARED], [])
# _LT_ENABLE_STATIC([DEFAULT])
# ----------------------------
# implement the --enable-static flag, and support the `static' and
# `disable-static' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_STATIC],
[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([static],
[AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_static=yes ;;
no) enable_static=no ;;
*)
enable_static=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_static=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_static=]_LT_ENABLE_STATIC_DEFAULT)
_LT_DECL([build_old_libs], [enable_static], [0],
[Whether or not to build static libraries])
])# _LT_ENABLE_STATIC
LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
# Old names:
AC_DEFUN([AC_ENABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
])
AC_DEFUN([AC_DISABLE_STATIC],
[_LT_SET_OPTION([LT_INIT], [disable-static])
])
AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AM_ENABLE_STATIC], [])
dnl AC_DEFUN([AM_DISABLE_STATIC], [])
# _LT_ENABLE_FAST_INSTALL([DEFAULT])
# ----------------------------------
# implement the --enable-fast-install flag, and support the `fast-install'
# and `disable-fast-install' LT_INIT options.
# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
m4_define([_LT_ENABLE_FAST_INSTALL],
[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
AC_ARG_ENABLE([fast-install],
[AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
[optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
[p=${PACKAGE-default}
case $enableval in
yes) enable_fast_install=yes ;;
no) enable_fast_install=no ;;
*)
enable_fast_install=no
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for pkg in $enableval; do
IFS="$lt_save_ifs"
if test "X$pkg" = "X$p"; then
enable_fast_install=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
_LT_DECL([fast_install], [enable_fast_install], [0],
[Whether or not to optimize for fast installation])dnl
])# _LT_ENABLE_FAST_INSTALL
LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
# Old names:
AU_DEFUN([AC_ENABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the `fast-install' option into LT_INIT's first parameter.])
])
AU_DEFUN([AC_DISABLE_FAST_INSTALL],
[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you put
the `disable-fast-install' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
# _LT_WITH_PIC([MODE])
# --------------------
# implement the --with-pic flag, and support the `pic-only' and `no-pic'
# LT_INIT options.
# MODE is either `yes' or `no'. If omitted, it defaults to `both'.
m4_define([_LT_WITH_PIC],
[AC_ARG_WITH([pic],
[AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
[lt_p=${PACKAGE-default}
case $withval in
yes|no) pic_mode=$withval ;;
*)
pic_mode=default
# Look at the argument we got. We use all the common list separators.
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
for lt_pkg in $withval; do
IFS="$lt_save_ifs"
if test "X$lt_pkg" = "X$lt_p"; then
pic_mode=yes
fi
done
IFS="$lt_save_ifs"
;;
esac],
[pic_mode=default])
test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
])# _LT_WITH_PIC
LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
# Old name:
AU_DEFUN([AC_LIBTOOL_PICMODE],
[_LT_SET_OPTION([LT_INIT], [pic-only])
AC_DIAGNOSE([obsolete],
[$0: Remove this warning and the call to _LT_SET_OPTION when you
put the `pic-only' option into LT_INIT's first parameter.])
])
dnl aclocal-1.4 backwards compatibility:
dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
## ----------------- ##
## LTDL_INIT Options ##
## ----------------- ##
m4_define([_LTDL_MODE], [])
LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
[m4_define([_LTDL_MODE], [nonrecursive])])
LT_OPTION_DEFINE([LTDL_INIT], [recursive],
[m4_define([_LTDL_MODE], [recursive])])
LT_OPTION_DEFINE([LTDL_INIT], [subproject],
[m4_define([_LTDL_MODE], [subproject])])
m4_define([_LTDL_TYPE], [])
LT_OPTION_DEFINE([LTDL_INIT], [installable],
[m4_define([_LTDL_TYPE], [installable])])
LT_OPTION_DEFINE([LTDL_INIT], [convenience],
[m4_define([_LTDL_TYPE], [convenience])])
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/bin/import_gram_vaani.py | #!/usr/bin/env python
import csv
import logging
import math
import os
import subprocess
import sys
import urllib
import urllib.request
from pathlib import Path

import pandas as pd
import swifter
from coqui_stt_training.util.importers import get_importers_parser, get_validate_label
from sox import Transformer
__version__ = "0.1.0"
_logger = logging.getLogger(__name__)
MAX_SECS = 10
BITDEPTH = 16
N_CHANNELS = 1
SAMPLE_RATE = 16000
DEV_PERCENTAGE = 0.10
TRAIN_PERCENTAGE = 0.80
def parse_args(args):
    """Parse command line parameters

    Args:
        args ([str]): Command line parameters as list of strings

    Returns:
        :obj:`argparse.Namespace`: command line parameters namespace
    """
    parser = get_importers_parser(description="Imports GramVaani data for Deep Speech")
    parser.add_argument(
        "--version",
        action="version",
        version="GramVaaniImporter {ver}".format(ver=__version__),
    )
    # -v and -vv share the same destination; loglevel stays None when
    # neither flag is passed.
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_const",
        required=False,
        help="set loglevel to INFO",
        dest="loglevel",
        const=logging.INFO,
    )
    parser.add_argument(
        "-vv",
        "--very-verbose",
        action="store_const",
        required=False,
        help="set loglevel to DEBUG",
        dest="loglevel",
        const=logging.DEBUG,
    )
    parser.add_argument(
        "-c",
        "--csv_filename",
        required=True,
        help="Path to the GramVaani csv",
        dest="csv_filename",
    )
    parser.add_argument(
        "-t",
        "--target_dir",
        required=True,
        help="Directory in which to save the importer GramVaani data",
        dest="target_dir",
    )
    return parser.parse_args(args)
def setup_logging(level):
    """Configure root logging to write to stdout.

    Args:
        level (int): minimum log level for emitting messages
    """
    log_format = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
    logging.basicConfig(
        level=level,
        stream=sys.stdout,
        format=log_format,
        datefmt="%Y-%m-%d %H:%M:%S",
    )
class GramVaaniCSV:
    """Parsed representation of a GramVaani csv.

    Args:
        csv_filename (str): Path to the GramVaani csv

    Attributes:
        data (:class:`pandas.DataFrame`): the parsed csv rows
    """

    def __init__(self, csv_filename):
        self.data = self._parse_csv(csv_filename)

    def _parse_csv(self, csv_filename):
        # Only the columns the importer needs are kept; the first line of
        # the file (header) is skipped.
        csv_path = os.path.abspath(csv_filename)
        _logger.info("Parsing csv file...%s", csv_path)
        frame = pd.read_csv(
            csv_path,
            names=[
                "piece_id",
                "audio_url",
                "transcript_labelled",
                "transcript",
                "labels",
                "content_filename",
                "audio_length",
                "user_id",
            ],
            usecols=["audio_url", "transcript", "audio_length"],
            skiprows=[0],
            engine="python",
            encoding="utf-8",
            quotechar='"',
            quoting=csv.QUOTE_ALL,
        )
        # Rows with any missing field are unusable downstream.
        frame.dropna(inplace=True)
        _logger.info("Parsed %d lines csv file." % len(frame))
        return frame
class GramVaaniDownloader:
    """GramVaaniDownloader downloads a GramVaani dataset.

    Args:
        gram_vaani_csv (GramVaaniCSV): A GramVaaniCSV representing the data to download
        target_dir (str): The path to download the data to

    Attributes:
        data (:class:`pandas.DataFrame`): `pandas.DataFrame` Containing the GramVaani csv data
    """

    def __init__(self, gram_vaani_csv, target_dir):
        self.target_dir = target_dir
        self.data = gram_vaani_csv.data

    def download(self):
        """Downloads the data associated with this instance

        Return:
            mp3_directory (os.path): The directory into which the associated mp3's were downloaded
        """
        mp3_directory = self._pre_download()
        # raw=True hands each row to the lambda as a positional array, which
        # is unpacked into (audio_url, transcript, audio_length).
        self.data.swifter.apply(
            func=lambda arg: self._download(*arg, mp3_directory), axis=1, raw=True
        )
        return mp3_directory

    def _pre_download(self):
        # Ensure target_dir and target_dir/mp3 exist (parents are assumed
        # to exist already since os.mkdir is non-recursive).
        mp3_directory = os.path.join(self.target_dir, "mp3")
        if not os.path.exists(self.target_dir):
            _logger.info("Creating directory...%s", self.target_dir)
            os.mkdir(self.target_dir)
        if not os.path.exists(mp3_directory):
            _logger.info("Creating directory...%s", mp3_directory)
            os.mkdir(mp3_directory)
        return mp3_directory

    def _download(self, audio_url, transcript, audio_length, mp3_directory):
        # Skip a stray header row that survived csv parsing.
        if audio_url == "audio_url":
            return
        mp3_filename = os.path.join(mp3_directory, os.path.basename(audio_url))
        if not os.path.exists(mp3_filename):
            _logger.debug("Downloading mp3 file...%s", audio_url)
            # NOTE(review): requires `import urllib.request` at module level;
            # `import urllib` alone does not load the request submodule.
            urllib.request.urlretrieve(audio_url, mp3_filename)
        else:
            _logger.debug("Already downloaded mp3 file...%s", audio_url)
class GramVaaniConverter:
    """Converts the downloaded GramVaani mp3's to wav's.

    Args:
        target_dir (str): The path to download the data from
        mp3_directory (os.path): The path containing the GramVaani mp3's

    Attributes:
        target_dir (str): The target directory passed as a command line argument
        mp3_directory (os.path): The path containing the GramVaani mp3's
    """

    def __init__(self, target_dir, mp3_directory):
        self.target_dir = target_dir
        self.mp3_directory = Path(mp3_directory)

    def convert(self):
        """Converts the mp3's associated with this instance to wav's

        Return:
            wav_directory (os.path): The directory into which the associated wav's were downloaded
        """
        wav_directory = self._pre_convert()
        for mp3_filename in self.mp3_directory.glob("**/*.mp3"):
            stem = os.path.splitext(os.path.basename(mp3_filename))[0]
            wav_filename = os.path.join(wav_directory, stem + ".wav")
            if os.path.exists(wav_filename):
                _logger.debug(
                    "Already converted mp3 file %s to wav file %s"
                    % (mp3_filename, wav_filename)
                )
                continue
            _logger.debug(
                "Converting mp3 file %s to wav file %s" % (mp3_filename, wav_filename)
            )
            # Resample to the importer's canonical format (16 kHz mono, 16 bit).
            transformer = Transformer()
            transformer.convert(
                samplerate=SAMPLE_RATE, n_channels=N_CHANNELS, bitdepth=BITDEPTH
            )
            transformer.build(str(mp3_filename), str(wav_filename))
        return wav_directory

    def _pre_convert(self):
        # Ensure target_dir and target_dir/wav exist (os.mkdir is
        # non-recursive, matching the downloader's behavior).
        wav_directory = os.path.join(self.target_dir, "wav")
        for directory in (self.target_dir, wav_directory):
            if not os.path.exists(directory):
                _logger.info("Creating directory...%s", directory)
                os.mkdir(directory)
        return wav_directory
class GramVaaniDataSets:
    """Builds train/dev/test splits from the converted GramVaani data.

    Args:
        target_dir (str): Directory holding the wav/ tree and the output CSVs
        wav_directory (os.path): Directory containing the converted wav files
        gram_vaani_csv (GramVaaniCSV): Parsed source csv
        validate_fn (callable, optional): transcript validator; when omitted,
            a module-level ``validate_label`` installed by the entry point is
            used if present.
    """

    def __init__(self, target_dir, wav_directory, gram_vaani_csv, validate_fn=None):
        self.target_dir = target_dir
        self.wav_directory = wav_directory
        self.csv_data = gram_vaani_csv.data
        self._validate_fn = validate_fn
        columns = ["wav_filename", "wav_filesize", "transcript"]
        self.raw = pd.DataFrame(columns=columns)
        self.valid = pd.DataFrame(columns=columns)
        self.train = pd.DataFrame(columns=columns)
        self.dev = pd.DataFrame(columns=columns)
        self.test = pd.DataFrame(columns=columns)

    def create(self):
        """Populates raw/valid and splits valid into disjoint train/dev/test."""
        self._convert_csv_data_to_raw_data()
        self.raw.index = range(len(self.raw.index))
        self.valid = self.raw[self._is_valid_raw_rows()]
        # Shuffle before splitting so the sets are randomly drawn.
        self.valid = self.valid.sample(frac=1).reset_index(drop=True)
        train_size, dev_size, test_size = self._calculate_data_set_sizes()
        # Positional, end-exclusive iloc slicing keeps the three sets
        # disjoint. The previous label-based .loc slices were end-INCLUSIVE,
        # so train/dev and dev/test each shared a boundary row (leakage).
        self.train = self.valid.iloc[:train_size]
        self.dev = self.valid.iloc[train_size : train_size + dev_size]
        self.test = self.valid.iloc[
            train_size + dev_size : train_size + dev_size + test_size
        ]

    def _convert_csv_data_to_raw_data(self):
        # raw=True hands each row to the lambda as a positional array, which
        # is unpacked into (audio_url, transcript, audio_length).
        self.raw[["wav_filename", "wav_filesize", "transcript"]] = self.csv_data[
            ["audio_url", "transcript", "audio_length"]
        ].swifter.apply(
            func=lambda arg: self._convert_csv_data_to_raw_data_impl(*arg),
            axis=1,
            raw=True,
        )

    def _convert_csv_data_to_raw_data_impl(self, audio_url, transcript, audio_length):
        # Skip a stray header row that survived csv parsing.
        if audio_url == "audio_url":
            return pd.Series(["wav_filename", "wav_filesize", "transcript"])
        mp3_filename = os.path.basename(audio_url)
        wav_relative_filename = os.path.join(
            "wav", os.path.splitext(os.path.basename(mp3_filename))[0] + ".wav"
        )
        wav_filesize = os.path.getsize(
            os.path.join(self.target_dir, wav_relative_filename)
        )
        validator = self._validate_fn
        if validator is None:
            # Fall back to a module-level validate_label installed by the
            # importer entry point, if any; otherwise keep the transcript.
            validator = globals().get("validate_label")
        if validator is not None:
            transcript = validator(transcript)
        if transcript is None:
            transcript = ""
        return pd.Series([wav_relative_filename, wav_filesize, transcript])

    def _is_valid_raw_rows(self):
        """Boolean Series: rows with a transcript and a plausible wav length."""
        is_valid_raw_transcripts = self._is_valid_raw_transcripts()
        is_valid_raw_wav_frames = self._is_valid_raw_wav_frames()
        is_valid_raw_row = [
            (is_valid_raw_transcript & is_valid_raw_wav_frame)
            for is_valid_raw_transcript, is_valid_raw_wav_frame in zip(
                is_valid_raw_transcripts, is_valid_raw_wav_frames
            )
        ]
        return pd.Series(is_valid_raw_row)

    def _is_valid_raw_transcripts(self):
        # Empty/NaN-ish transcripts are invalid.
        return pd.Series([bool(transcript) for transcript in self.raw.transcript])

    def _is_valid_raw_wav_frames(self):
        # soxi -s reports the sample count of each wav file.
        transcripts = [str(transcript) for transcript in self.raw.transcript]
        wav_filepaths = [
            os.path.join(self.target_dir, str(wav_filename))
            for wav_filename in self.raw.wav_filename
        ]
        wav_frames = [
            int(
                subprocess.check_output(
                    ["soxi", "-s", wav_filepath], stderr=subprocess.STDOUT
                )
            )
            for wav_filepath in wav_filepaths
        ]
        is_valid_raw_wav_frames = [
            self._is_wav_frame_valid(wav_frame, transcript)
            for wav_frame, transcript in zip(wav_frames, transcripts)
        ]
        return pd.Series(is_valid_raw_wav_frames)

    def _is_wav_frame_valid(self, wav_frame, transcript):
        """Reject clips too short for their transcript or longer than MAX_SECS."""
        is_wav_frame_valid = True
        # Heuristic: require roughly one 20 ms of audio per transcript
        # character (ms / 10 / 2) — TODO confirm intended threshold.
        if int(wav_frame / SAMPLE_RATE * 1000 / 10 / 2) < len(str(transcript)):
            is_wav_frame_valid = False
        elif wav_frame / SAMPLE_RATE > MAX_SECS:
            is_wav_frame_valid = False
        return is_wav_frame_valid

    def _calculate_data_set_sizes(self):
        # Floor the train/dev fractions; the remainder goes to test so the
        # three sizes always sum to len(self.valid).
        total_size = len(self.valid)
        dev_size = math.floor(total_size * DEV_PERCENTAGE)
        train_size = math.floor(total_size * TRAIN_PERCENTAGE)
        test_size = total_size - (train_size + dev_size)
        return (train_size, dev_size, test_size)

    def save(self):
        """Writes train.csv, dev.csv and test.csv under target_dir."""
        datasets = ["train", "dev", "test"]
        for dataset in datasets:
            self._save(dataset)

    def _save(self, dataset):
        dataset_path = os.path.join(self.target_dir, dataset + ".csv")
        dataframe = getattr(self, dataset)
        dataframe.to_csv(
            dataset_path,
            index=False,
            encoding="utf-8",
            escapechar="\\",
            quoting=csv.QUOTE_MINIMAL,
        )
def main(args):
    """Main entry point allowing external calls

    Args:
        args ([str]): command line parameter list
    """
    # GramVaaniDataSets._convert_csv_data_to_raw_data_impl looks up
    # ``validate_label`` at module scope, so it must be installed globally
    # here rather than bound to a local name.
    global validate_label
    args = parse_args(args)
    validate_label = get_validate_label(args)
    setup_logging(args.loglevel)
    _logger.info("Starting GramVaani importer...")
    _logger.info("Starting loading GramVaani csv...")
    csv = GramVaaniCSV(args.csv_filename)
    _logger.info("Starting downloading GramVaani mp3's...")
    downloader = GramVaaniDownloader(csv, args.target_dir)
    mp3_directory = downloader.download()
    _logger.info("Starting converting GramVaani mp3's to wav's...")
    converter = GramVaaniConverter(args.target_dir, mp3_directory)
    wav_directory = converter.convert()
    datasets = GramVaaniDataSets(args.target_dir, wav_directory, csv)
    datasets.create()
    datasets.save()
    _logger.info("Finished GramVaani importer...")
# Guard the entry point so importing this module does not immediately run
# the whole download/convert pipeline (matches the convention used by the
# sibling importer scripts).
if __name__ == "__main__":
    main(sys.argv[1:])
| 0 |
coqui_public_repos/STT-models/indonesian/itml | coqui_public_repos/STT-models/indonesian/itml/v0.1.0/alphabet.txt |
a
b
c
d
e
f
g
h
i
j
k
l
m
n
o
p
r
s
t
u
v
w
x
y
z
| 0 |
coqui_public_repos | coqui_public_repos/STT/stats.py | #!/usr/bin/env python3
import argparse
import functools
from pathlib import Path
import pandas
from coqui_stt_training.util.helpers import secs_to_hours
def read_csvs(csv_files):
    """Load one or more importer CSVs into a single DataFrame.

    Relative ``wav_filename`` entries are resolved against the directory of
    the CSV file that contains them.
    """

    def _resolve(base, wav_path):
        # Absolute paths pass through unchanged; relative ones are anchored
        # at the containing CSV's parent directory.
        wav_path = Path(wav_path)
        if wav_path.is_absolute():
            return str(wav_path)
        return str(base.parent / wav_path)

    frames = []
    for csv_path in csv_files:
        frame = pandas.read_csv(csv_path, encoding="utf-8", na_filter=False)
        frame["wav_filename"] = frame["wav_filename"].apply(
            lambda p, base=csv_path: _resolve(base, p)
        )
        frames.append(frame)
    # Inner join keeps only the columns common to every set; the result is
    # re-indexed 0..N.
    return pandas.concat(frames, join="inner", ignore_index=True)
def main():
    """Parse CLI arguments, load the CSVs and print corpus statistics."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-csv",
        "--csv-files",
        help="Str. Filenames as a comma separated list",
        required=True,
    )
    parser.add_argument(
        "--sample-rate",
        type=int,
        default=16000,
        required=False,
        help="Audio sample rate",
    )
    parser.add_argument(
        "--channels", type=int, default=1, required=False, help="Audio channels"
    )
    parser.add_argument(
        "--bits-per-sample",
        type=int,
        default=16,
        required=False,
        help="Audio bits per sample",
    )
    args = parser.parse_args()

    csv_paths = [Path(part).absolute() for part in args.csv_files.split(",")]
    frame = read_csvs(csv_paths)

    bytes_total = frame["wav_filesize"].sum()
    file_count = len(frame)
    # Subtract the 44-byte canonical WAV header from each file before
    # converting payload bytes to seconds of audio.
    seconds_total = (
        (frame["wav_filesize"] - 44)
        / args.sample_rate
        / args.channels
        / (args.bits_per_sample // 8)
    ).sum()

    print("Total bytes:", bytes_total)
    print("Total files:", file_count)
    print("Total time:", secs_to_hours(seconds_total))
main()
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/examples-vad_transcriber-py39.yml | build:
template_file: examples-base.tyml
docker_image: "python:3.9"
dependencies:
- "linux-amd64-cpu-opt"
args:
tests_cmdline: "${system.homedir.linux}/DeepSpeech/examples/vad_transcriber/test.sh 3.9.0:"
workerType: "${docker.dsTests}"
metadata:
name: "DeepSpeech examples: VAD transcriber Py3.9"
description: "DeepSpeech examples: VAD transcriber streaming Python 3.9"
| 0 |
coqui_public_repos/STT/native_client/kenlm | coqui_public_repos/STT/native_client/kenlm/util/read_compressed.hh | #ifndef UTIL_READ_COMPRESSED_H
#define UTIL_READ_COMPRESSED_H
#include "exception.hh"
#include "scoped.hh"
#include <cstddef>
#include <stdint.h>
namespace util {
// Base class for errors raised by the compressed-stream readers below.
class CompressedException : public Exception {
  public:
    CompressedException() throw();
    virtual ~CompressedException() throw();
};
// Error from the gzip decoder.
class GZException : public CompressedException {
  public:
    GZException() throw();
    ~GZException() throw();
};
// Error from the bzip2 decoder.
class BZException : public CompressedException {
  public:
    BZException() throw();
    ~BZException() throw();
};
// Error from the xz (lzma) decoder.
class XZException : public CompressedException {
  public:
    XZException() throw();
    ~XZException() throw();
};
class ReadCompressed;
// Abstract decompression state held by a ReadCompressed. Implementations
// can swap themselves out via ReplaceThis (e.g. when the stream format
// changes or ends — TODO confirm against the .cc).
class ReadBase {
  public:
    virtual ~ReadBase() {}
    // Decompress up to `amount` bytes into `to`; `thunk` is the owning
    // ReadCompressed.
    virtual std::size_t Read(void *to, std::size_t amount, ReadCompressed &thunk) = 0;

  protected:
    // Replace the thunk's internal reader with `with`.
    static void ReplaceThis(ReadBase *with, ReadCompressed &thunk);
    ReadBase *Current(ReadCompressed &thunk);
    // Mutable access to the thunk's byte counter (presumably raw_amount_).
    static uint64_t &ReadCount(ReadCompressed &thunk);
};
// Reads a possibly-compressed stream and presents it as plain bytes.
class ReadCompressed {
  public:
    // Number of leading bytes needed to detect a compression format.
    static const std::size_t kMagicSize = 6;

    // Must have at least kMagicSize bytes.
    static bool DetectCompressedMagic(const void *from);

    // Takes ownership of fd.
    explicit ReadCompressed(int fd);

    // Try to avoid using this. Use the fd instead.
    // There is no decompression support for istreams.
    explicit ReadCompressed(std::istream &in);

    // Must call Reset later.
    ReadCompressed();

    // Takes ownership of fd.
    void Reset(int fd);

    // Same advice as the constructor.
    void Reset(std::istream &in);

    std::size_t Read(void *to, std::size_t amount);

    // Repeatedly call read to fill a buffer unless EOF is hit.
    // Return number of bytes read.
    std::size_t ReadOrEOF(void *const to, std::size_t amount);

    // Raw (pre-decompression) bytes consumed so far.
    uint64_t RawAmount() const { return raw_amount_; }

  private:
    // ReadBase needs access to swap internal_ and update the byte count.
    friend class ReadBase;
    scoped_ptr<ReadBase> internal_;
    uint64_t raw_amount_;
};
} // namespace util
#endif // UTIL_READ_COMPRESSED_H
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions/mpdt/mpdtscript.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Definitions of 'scriptable' versions of mpdt operations, that is,
// those that can be called with FstClass-type arguments.
//
// See comments in nlp/fst/script/script-impl.h for how the registration
// mechanism allows these to work with various arc types.
#include <string>
#include <vector>
#include <fst/extensions/mpdt/compose.h>
#include <fst/extensions/mpdt/expand.h>
#include <fst/extensions/mpdt/mpdtscript.h>
#include <fst/extensions/mpdt/reverse.h>
#include <fst/script/script-impl.h>
namespace fst {
namespace script {
// Composes an FST with a multi-pushdown transducer described by its
// parenthesis pairs (parens) and per-parenthesis stack assignments,
// writing the result to ofst.  Returns early (leaving ofst untouched)
// if the three arguments' arc types do not all match.
// left_pdt: which argument is treated as the MPDT — assumed to select
// ifst1 vs ifst2; confirm against mpdt/compose.h.
void MPdtCompose(const FstClass &ifst1, const FstClass &ifst2,
                 const std::vector<LabelPair> &parens,
                 const std::vector<int64> &assignments, MutableFstClass *ofst,
                 const MPdtComposeOptions &copts, bool left_pdt) {
  if (!internal::ArcTypesMatch(ifst1, ifst2, "MPdtCompose") ||
      !internal::ArcTypesMatch(ifst1, *ofst, "MPdtCompose")) return;
  MPdtComposeArgs args(ifst1, ifst2, parens, assignments, ofst, copts,
                       left_pdt);
  // Dispatch to the arc-type-specific implementation via the registry.
  Apply<Operation<MPdtComposeArgs>>("MPdtCompose", ifst1.ArcType(), &args);
}
// Expands the MPDT (ifst plus parens/assignments) into ofst according to
// opts, dispatching on the input FST's arc type via the operation registry.
void MPdtExpand(const FstClass &ifst, const std::vector<LabelPair> &parens,
                const std::vector<int64> &assignments, MutableFstClass *ofst,
                const MPdtExpandOptions &opts) {
  MPdtExpandArgs args(ifst, parens, assignments, ofst, opts);
  Apply<Operation<MPdtExpandArgs>>("MPdtExpand", ifst.ArcType(), &args);
}
// Convenience overload: wraps the bare connect flag in MPdtExpandOptions
// and forwards to the options-based overload above.
void MPdtExpand(const FstClass &ifst, const std::vector<LabelPair> &parens,
                const std::vector<int64> &assignments, MutableFstClass *ofst,
                bool connect) {
  MPdtExpand(ifst, parens, assignments, ofst, MPdtExpandOptions(connect));
}
// Reverses the MPDT into ofst.  Note assignments is passed by mutable
// pointer: the arc-type-specific implementation may rewrite the stack
// assignments as part of reversal.
void MPdtReverse(const FstClass &ifst, const std::vector<LabelPair> &parens,
                 std::vector<int64> *assignments, MutableFstClass *ofst) {
  MPdtReverseArgs args(ifst, parens, assignments, ofst);
  Apply<Operation<MPdtReverseArgs>>("MPdtReverse", ifst.ArcType(), &args);
}
// Prints summary information about the MPDT (dispatching on arc type);
// output destination is determined by the underlying implementation.
void PrintMPdtInfo(const FstClass &ifst, const std::vector<LabelPair> &parens,
                   const std::vector<int64> &assignments) {
  PrintMPdtInfoArgs args(ifst, parens, assignments);
  Apply<Operation<PrintMPdtInfoArgs>>("PrintMPdtInfo", ifst.ArcType(), &args);
}
// Register operations for common arc types.
REGISTER_FST_MPDT_OPERATIONS(StdArc);
REGISTER_FST_MPDT_OPERATIONS(LogArc);
REGISTER_FST_MPDT_OPERATIONS(Log64Arc);
} // namespace script
} // namespace fst
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/bin/fstintersect.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/flags.h>
// Command-line flags consumed by the shared fstintersect implementation.
DEFINE_string(compose_filter, "auto",
              "Composition filter, one of: \"alt_sequence\", \"auto\", "
              "\"match\", \"null\", \"sequence\", \"trivial\"");
DEFINE_bool(connect, true, "Trim output");

// Implemented elsewhere (the *-main.cc companion file); this translation
// unit only declares flags and forwards main().
int fstintersect_main(int argc, char **argv);

int main(int argc, char **argv) { return fstintersect_main(argc, argv); }
| 0 |
coqui_public_repos/STT/native_client | coqui_public_repos/STT/native_client/kenlm/setup.py | from setuptools import setup, Extension
import glob
import platform
import os
import sys
import re
#Does gcc compile with this header and library?
def compile_test(header, library):
    """Return True iff g++ can compile and link an empty program that
    includes ``header`` and links against ``library``.

    Used to probe for optional compression libraries (zlib/bzip2/xz).
    Requires bash and g++ on PATH; any failure (missing compiler, header,
    or library) simply yields False, so probing is always safe.
    """
    dummy_path = os.path.join(os.path.dirname(__file__), "dummy")
    # Quote the scratch-binary path so a checkout under a directory containing
    # spaces doesn't break the shell command.  header/library are trusted
    # internal constants (e.g. 'zlib.h', 'z'), not user input.
    command = "bash -c \"g++ -include " + header + " -l" + library + " -x c++ - <<<'int main() {}' -o '" + dummy_path + "' >/dev/null 2>/dev/null && rm '" + dummy_path + "' 2>/dev/null\""
    return os.system(command) == 0
# Maximum n-gram order KenLM is compiled for.  Overridable via a
# "--max_order=N" argument (or "--max_order N" inside a single argv element),
# which is consumed here so setuptools never sees it.
max_order = "6"
is_max_order = [s for s in sys.argv if "--max_order" in s]
for element in is_max_order:
    max_order = re.split('[= ]',element)[1]
    sys.argv.remove(element)

# All KenLM sources, excluding standalone mains and unit tests.
FILES = glob.glob('util/*.cc') + glob.glob('lm/*.cc') + glob.glob('util/double-conversion/*.cc') + glob.glob('python/*.cc')
FILES = [fn for fn in FILES if not (fn.endswith('main.cc') or fn.endswith('test.cc'))]

# Platform-specific runtime libraries to link against.
if platform.system() == 'Linux':
    LIBS = ['stdc++', 'rt']
elif platform.system() == 'Darwin':
    LIBS = ['c++']
else:
    LIBS = []

#We don't need -std=c++11 but python seems to be compiled with it now. https://github.com/kpu/kenlm/issues/86
ARGS = ['-O3', '-DNDEBUG', '-DKENLM_MAX_ORDER='+max_order, '-std=c++11']

#Attempted fix to https://github.com/kpu/kenlm/issues/186 and https://github.com/kpu/kenlm/issues/197
if platform.system() == 'Darwin':
    ARGS += ["-stdlib=libc++", "-mmacosx-version-min=10.7"]

# Enable optional compression support when the corresponding development
# headers/libraries are present on the build machine (probed via g++).
if compile_test('zlib.h', 'z'):
    ARGS.append('-DHAVE_ZLIB')
    LIBS.append('z')

if compile_test('bzlib.h', 'bz2'):
    ARGS.append('-DHAVE_BZLIB')
    LIBS.append('bz2')

if compile_test('lzma.h', 'lzma'):
    ARGS.append('-DHAVE_XZLIB')
    LIBS.append('lzma')

ext_modules = [
    Extension(name='kenlm',
        sources=FILES + ['python/kenlm.cpp'],
        language='C++',
        include_dirs=['.'],
        libraries=LIBS,
        extra_compile_args=ARGS)
]

setup(
    name='kenlm',
    ext_modules=ext_modules,
    include_package_data=True,
)
| 0 |
coqui_public_repos/STT/native_client | coqui_public_repos/STT/native_client/swift/Bindings.swift | //
// STT.swift
// stt_ios
//
// Created by Reuben Morais on 14.06.20.
// Copyright © 2020 Mozilla
// Copyright © 2021-2022 Coqui GmbH
import Foundation
import STTNative
/// Errors thrown by the Swift STT bindings.  Each case wraps the raw
/// native error code so it can be converted back to a message (see the
/// LocalizedError conformance below).
public enum STTError: Error {
    // Should be kept in sync with coqui-stt.h
    case noModel(errorCode: Int32)
    case invalidAlphabet(errorCode: Int32)
    case invalidShape(errorCode: Int32)
    case invalidScorer(errorCode: Int32)
    case modelIncompatible(errorCode: Int32)
    case scorerNotEnabled(errorCode: Int32)
    case scorerUnreadable(errorCode: Int32)
    case scorerInvalidLm(errorCode: Int32)
    case scorerNoTrie(errorCode: Int32)
    case scorerInvalidTrie(errorCode: Int32)
    case scorerVersionMismatch(errorCode: Int32)
    case failInitMmap(errorCode: Int32)
    case failInitSess(errorCode: Int32)
    case failInterpreter(errorCode: Int32)
    case failRunSess(errorCode: Int32)
    case failCreateStream(errorCode: Int32)
    case failReadProtobuf(errorCode: Int32)
    case failCreateSess(errorCode: Int32)
    case failCreateModel(errorCode: Int32)

    // Additional case for invalid error codes, should never happen unless the
    // user has mixed header and binary versions.
    case invalidErrorCode(errorCode: Int32)
}
extension STTError : LocalizedError {
    /// Human-readable message for the wrapped native error code, obtained
    /// from the native library (the returned C string is freed after
    /// conversion to a Swift String).
    public var errorDescription: String? {
        switch self {
        // Every case carries the raw code, so they all share one binding.
        case .noModel(let errorCode),
             .invalidAlphabet(let errorCode),
             .invalidShape(let errorCode),
             .invalidScorer(let errorCode),
             .modelIncompatible(let errorCode),
             .scorerNotEnabled(let errorCode),
             .scorerUnreadable(let errorCode),
             .scorerInvalidLm(let errorCode),
             .scorerNoTrie(let errorCode),
             .scorerInvalidTrie(let errorCode),
             .scorerVersionMismatch(let errorCode),
             .failInitMmap(let errorCode),
             .failInitSess(let errorCode),
             .failInterpreter(let errorCode),
             .failRunSess(let errorCode),
             .failCreateStream(let errorCode),
             .failReadProtobuf(let errorCode),
             .failCreateSess(let errorCode),
             .failCreateModel(let errorCode),
             .invalidErrorCode(let errorCode):
            let result = STT_ErrorCodeToErrorMessage(errorCode)
            defer { STT_FreeString(result) }
            return String(cString: result!)
        }
    }
}
/// Maps a raw native error code onto the corresponding `STTError` case.
/// Codes not covered by the switch (e.g. from a mismatched header/binary
/// combination) become `.invalidErrorCode`.
private func errorCodeToEnum(errorCode: Int32) -> STTError {
    switch Int(errorCode) {
    case Int(STT_ERR_NO_MODEL.rawValue):
        return STTError.noModel(errorCode: errorCode)
    case Int(STT_ERR_INVALID_ALPHABET.rawValue):
        return STTError.invalidAlphabet(errorCode: errorCode)
    case Int(STT_ERR_INVALID_SHAPE.rawValue):
        return STTError.invalidShape(errorCode: errorCode)
    case Int(STT_ERR_INVALID_SCORER.rawValue):
        return STTError.invalidScorer(errorCode: errorCode)
    case Int(STT_ERR_MODEL_INCOMPATIBLE.rawValue):
        return STTError.modelIncompatible(errorCode: errorCode)
    case Int(STT_ERR_SCORER_NOT_ENABLED.rawValue):
        return STTError.scorerNotEnabled(errorCode: errorCode)
    case Int(STT_ERR_SCORER_UNREADABLE.rawValue):
        return STTError.scorerUnreadable(errorCode: errorCode)
    case Int(STT_ERR_SCORER_INVALID_LM.rawValue):
        return STTError.scorerInvalidLm(errorCode: errorCode)
    case Int(STT_ERR_SCORER_NO_TRIE.rawValue):
        return STTError.scorerNoTrie(errorCode: errorCode)
    case Int(STT_ERR_SCORER_INVALID_TRIE.rawValue):
        return STTError.scorerInvalidTrie(errorCode: errorCode)
    case Int(STT_ERR_SCORER_VERSION_MISMATCH.rawValue):
        return STTError.scorerVersionMismatch(errorCode: errorCode)
    case Int(STT_ERR_FAIL_INIT_MMAP.rawValue):
        return STTError.failInitMmap(errorCode: errorCode)
    case Int(STT_ERR_FAIL_INIT_SESS.rawValue):
        return STTError.failInitSess(errorCode: errorCode)
    case Int(STT_ERR_FAIL_INTERPRETER.rawValue):
        return STTError.failInterpreter(errorCode: errorCode)
    case Int(STT_ERR_FAIL_RUN_SESS.rawValue):
        return STTError.failRunSess(errorCode: errorCode)
    case Int(STT_ERR_FAIL_CREATE_STREAM.rawValue):
        return STTError.failCreateStream(errorCode: errorCode)
    case Int(STT_ERR_FAIL_READ_PROTOBUF.rawValue):
        return STTError.failReadProtobuf(errorCode: errorCode)
    case Int(STT_ERR_FAIL_CREATE_SESS.rawValue):
        return STTError.failCreateSess(errorCode: errorCode)
    case Int(STT_ERR_FAIL_CREATE_MODEL.rawValue):
        return STTError.failCreateModel(errorCode: errorCode)
    default:
        return STTError.invalidErrorCode(errorCode: errorCode)
    }
}
/// Throws the `STTError` corresponding to `errorCode` unless it is the
/// native success code (`STT_ERR_OK`).
private func evaluateErrorCode(errorCode: Int32) throws {
    guard errorCode == Int32(STT_ERR_OK.rawValue) else {
        throw errorCodeToEnum(errorCode: errorCode)
    }
}
/// Stores text of an individual token, along with its timing information
public struct STTTokenMetadata {
    /// The text corresponding to this token
    public let text: String

    /// Position of the token in units of 20ms
    public let timestep: Int

    /// Position of the token in seconds
    public let startTime: Float

    /// Copies the fields out of the C struct; the C-owned memory is not
    /// retained, so the value is safe after the native Metadata is freed.
    internal init(fromInternal: TokenMetadata) {
        text = String(cString: fromInternal.text)
        timestep = Int(fromInternal.timestep)
        startTime = fromInternal.start_time
    }
}
/** A single transcript computed by the model, including a confidence value and
 the metadata for its constituent tokens
 */
public struct STTCandidateTranscript {
    /// Array of STTTokenMetadata objects
    public private(set) var tokens: [STTTokenMetadata] = []

    /** Approximated confidence value for this transcript. This corresponds to
     both acoustic model and language model scores that contributed to the
     creation of this transcript.
     */
    // Fix: was missing `public`, making the documented confidence value
    // invisible to consumers of the framework (all sibling fields are public).
    public let confidence: Double

    /// Copies tokens and confidence out of the C struct so the value survives
    /// STT_FreeMetadata on the native side.
    internal init(fromInternal: CandidateTranscript) {
        let tokensBuffer = UnsafeBufferPointer<TokenMetadata>(start: fromInternal.tokens, count: Int(fromInternal.num_tokens))
        for tok in tokensBuffer {
            tokens.append(STTTokenMetadata(fromInternal: tok))
        }
        confidence = fromInternal.confidence
    }
}
/// An array of STTCandidateTranscript objects computed by the model
public struct STTMetadata {
    /// Array of STTCandidateTranscript objects
    public private(set) var transcripts: [STTCandidateTranscript] = []

    /// Deep-copies all transcripts out of the native Metadata; callers free
    /// the native pointer afterwards (see STT_FreeMetadata call sites).
    internal init(fromInternal: UnsafeMutablePointer<Metadata>) {
        let md = fromInternal.pointee
        let transcriptsBuffer = UnsafeBufferPointer<CandidateTranscript>(
            start: md.transcripts,
            count: Int(md.num_transcripts))
        for tr in transcriptsBuffer {
            transcripts.append(STTCandidateTranscript(fromInternal: tr))
        }
    }
}
/// A live streaming-inference context backed by the native library.
/// Instances are created by `STTModel.createStream()`.  The native context
/// is consumed by `finishStream`/`finishStreamWithMetadata`; if neither is
/// called, `deinit` frees it.
public class STTStream {
    /// Native stream handle; nil once the stream has been finished or freed.
    private var streamCtx: OpaquePointer!

    internal init(streamContext: OpaquePointer) {
        streamCtx = streamContext
    }

    deinit {
        // Only free if the caller never finished the stream (finishing sets
        // streamCtx to nil).
        if streamCtx != nil {
            STT_FreeStream(streamCtx)
            streamCtx = nil
        }
    }

    /** Feed audio samples to an ongoing streaming inference.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Precondition: `finishStream()` has not been called on this stream.
     */
    public func feedAudioContent(buffer: Array<Int16>) {
        precondition(streamCtx != nil, "calling method on invalidated Stream")

        buffer.withUnsafeBufferPointer { unsafeBufferPointer in
            feedAudioContent(buffer: unsafeBufferPointer)
        }
    }

    /** Feed audio samples to an ongoing streaming inference.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Precondition: `finishStream()` has not been called on this stream.
     */
    public func feedAudioContent(buffer: UnsafeBufferPointer<Int16>) {
        precondition(streamCtx != nil, "calling method on invalidated Stream")

        STT_FeedAudioContent(streamCtx, buffer.baseAddress, UInt32(buffer.count))
    }

    /** Compute the intermediate decoding of an ongoing streaming inference.
     - Precondition: `finishStream()` has not been called on this stream.
     - Returns: The STT intermediate result.
     */
    public func intermediateDecode() -> String {
        precondition(streamCtx != nil, "calling method on invalidated Stream")

        let result = STT_IntermediateDecode(streamCtx)
        defer { STT_FreeString(result) }
        return String(cString: result!)
    }

    /** Compute the intermediate decoding of an ongoing streaming inference,
     return results including metadata.
     - Parameter numResults: The number of candidate transcripts to return.
     - Precondition: `finishStream()` has not been called on this stream.
     - Returns: Metadata struct containing multiple CandidateTranscript structs.
       Each transcript has per-token metadata including timing information.
     */
    public func intermediateDecodeWithMetadata(numResults: Int) -> STTMetadata {
        precondition(streamCtx != nil, "calling method on invalidated Stream")
        let result = STT_IntermediateDecodeWithMetadata(streamCtx, UInt32(numResults))!
        defer { STT_FreeMetadata(result) }
        return STTMetadata(fromInternal: result)
    }

    /** Compute the final decoding of an ongoing streaming inference and return
     the result. Signals the end of an ongoing streaming inference.
     - Precondition: `finishStream()` has not been called on this stream.
     - Returns: The STT result.
     - Postcondition: This method will invalidate this streaming context.
     */
    public func finishStream() -> String {
        precondition(streamCtx != nil, "calling method on invalidated Stream")

        let result = STT_FinishStream(streamCtx)
        defer {
            STT_FreeString(result)
            // The native call consumed the context; mark it invalid.
            streamCtx = nil
        }
        return String(cString: result!)
    }

    /** Compute the final decoding of an ongoing streaming inference and return
     results including metadata. Signals the end of an ongoing streaming
     inference.
     - Parameter numResults: The number of candidate transcripts to return.
     - Precondition: `finishStream()` has not been called on this stream.
     - Returns: Metadata struct containing multiple CandidateTranscript structs.
       Each transcript has per-token metadata including timing information.
     - Postcondition: This method will invalidate this streaming context.
     */
    public func finishStreamWithMetadata(numResults: Int) -> STTMetadata {
        precondition(streamCtx != nil, "calling method on invalidated Stream")

        let result = STT_FinishStreamWithMetadata(streamCtx, UInt32(numResults))!
        defer {
            STT_FreeMetadata(result)
            // The native call consumed the context; mark it invalid.
            streamCtx = nil
        }
        return STTMetadata(fromInternal: result)
    }
}
/// An object providing an interface to a trained STT model.
public class STTModel {
    /// Native model handle; nil if `STT_CreateModel` failed in init.
    private var modelCtx: OpaquePointer!

    /**
     - Parameter modelPath: The path to the model file.
     - Throws: `STTError` on failure.
     */
    public init(modelPath: String) throws {
        let err = STT_CreateModel(modelPath, &modelCtx)
        try evaluateErrorCode(errorCode: err)
    }

    deinit {
        // Fix: guard against a failed init.  If STT_CreateModel errored, the
        // throwing init leaves modelCtx nil, yet deinit still runs (all stored
        // properties were initialized); unconditionally calling
        // STT_FreeModel(modelCtx) would then pass nil to the C API.  This
        // mirrors STTStream.deinit.
        if modelCtx != nil {
            STT_FreeModel(modelCtx)
            modelCtx = nil
        }
    }

    /** Get beam width value used by the model. If {@link STT_SetModelBeamWidth}
     was not called before, will return the default value loaded from the
     model file.
     - Returns: Beam width value used by the model.
     */
    public func getBeamWidth() -> Int {
        return Int(STT_GetModelBeamWidth(modelCtx))
    }

    /** Set beam width value used by the model.
     - Parameter beamWidth: The beam width used by the model. A larger beam
                            width value generates better results at the cost
                            of decoding time.
     - Throws: `STTError` on failure.
     */
    public func setBeamWidth(beamWidth: Int) throws {
        let err = STT_SetModelBeamWidth(modelCtx, UInt32(beamWidth))
        try evaluateErrorCode(errorCode: err)
    }

    /// The sample rate expected by the model, in Hz.
    public var sampleRate: Int {
        get {
            return Int(STT_GetModelSampleRate(modelCtx))
        }
    }

    /** Enable decoding using an external scorer.
     - Parameter scorerPath: The path to the external scorer file.
     - Throws: `STTError` on failure.
     */
    public func enableExternalScorer(scorerPath: String) throws {
        let err = STT_EnableExternalScorer(modelCtx, scorerPath)
        try evaluateErrorCode(errorCode: err)
    }

    /** Disable decoding using an external scorer.
     - Throws: `STTError` on failure.
     */
    public func disableExternalScorer() throws {
        let err = STT_DisableExternalScorer(modelCtx)
        try evaluateErrorCode(errorCode: err)
    }

    /** Set hyperparameters alpha and beta of the external scorer.
     - Parameter alpha: The alpha hyperparameter of the decoder. Language model weight.
     - Parameter beta: The beta hyperparameter of the decoder. Word insertion weight.
     - Throws: `STTError` on failure.
     */
    public func setScorerAlphaBeta(alpha: Float, beta: Float) throws {
        let err = STT_SetScorerAlphaBeta(modelCtx, alpha, beta)
        try evaluateErrorCode(errorCode: err)
    }

    /** Use the STT model to convert speech to text.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Returns: The STT result.
     */
    public func speechToText(buffer: Array<Int16>) -> String {
        return buffer.withUnsafeBufferPointer { unsafeBufferPointer -> String in
            return speechToText(buffer: unsafeBufferPointer)
        }
    }

    /** Use the STT model to convert speech to text.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Returns: The STT result.
     */
    public func speechToText(buffer: UnsafeBufferPointer<Int16>) -> String {
        let result = STT_SpeechToText(modelCtx, buffer.baseAddress, UInt32(buffer.count))
        defer { STT_FreeString(result) }
        return String(cString: result!)
    }

    /** Use the STT model to convert speech to text and output results
     including metadata.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Parameter numResults: The maximum number of STTCandidateTranscript
                             structs to return. Returned value might be smaller than this.
     - Returns: Metadata struct containing multiple CandidateTranscript structs.
       Each transcript has per-token metadata including timing information.
     */
    public func speechToTextWithMetadata(buffer: Array<Int16>, numResults: Int) -> STTMetadata {
        return buffer.withUnsafeBufferPointer { unsafeBufferPointer -> STTMetadata in
            return speechToTextWithMetadata(buffer: unsafeBufferPointer, numResults: numResults)
        }
    }

    /** Use the STT model to convert speech to text and output results
     including metadata.
     - Parameter buffer: A 16-bit, mono raw audio signal at the appropriate
                         sample rate (matching what the model was trained on).
     - Parameter numResults: The maximum number of STTCandidateTranscript
                             structs to return. Returned value might be smaller than this.
     - Returns: Metadata struct containing multiple CandidateTranscript structs.
       Each transcript has per-token metadata including timing information.
     */
    public func speechToTextWithMetadata(buffer: UnsafeBufferPointer<Int16>, numResults: Int) -> STTMetadata {
        let result = STT_SpeechToTextWithMetadata(
            modelCtx,
            buffer.baseAddress,
            UInt32(buffer.count),
            UInt32(numResults))!
        defer { STT_FreeMetadata(result) }
        return STTMetadata(fromInternal: result)
    }

    /** Create a new streaming inference state.
     - Returns: STTStream object representing the streaming state.
     - Throws: `STTError` on failure.
     */
    public func createStream() throws -> STTStream {
        var streamContext: OpaquePointer!
        let err = STT_CreateStream(modelCtx, &streamContext)
        try evaluateErrorCode(errorCode: err)
        return STTStream(streamContext: streamContext)
    }
}
/// Returns the version string of the underlying native STT library,
/// freeing the C string after conversion.
public func STTVersion() -> String {
    let result = STT_Version()
    defer { STT_FreeString(result) }
    return String(cString: result!)
}
| 0 |
coqui_public_repos | coqui_public_repos/STT/Makefile | STT_REPO ?= https://github.com/coqui-ai/STT.git
# Git revision baked into generated Dockerfiles (see STT_REPO above).
STT_SHA ?= origin/main

# Render Dockerfile.<variant> from Dockerfile.<variant>.tmpl by substituting
# the #STT_REPO# and #STT_SHA# placeholders.
Dockerfile%: Dockerfile%.tmpl
	sed \
		-e "s|#STT_REPO#|$(STT_REPO)|g" \
		-e "s|#STT_SHA#|$(STT_SHA)|g" \
		< $< > $@
| 0 |
coqui_public_repos/Trainer | coqui_public_repos/Trainer/trainer/TODO.txt | + Accumulate gradients b/w batches.
+ Abstract DashLogger
+ MLFlow logger
+ Profiler integration.
+ Moving `training_assets` to the model implementation.
- Wrap the model so callers do not need to use `.module` under DDP.
- Overfitting to a batch.
- TPU training
- BaseTrainingConfig
- Add Checkpoint manager
- Use `logging` instead of `print`
- Auto-scale the batch size, i.e. find the largest batch size that fits for training.
- Stochastic weight averaging
- Deepspeed integration
| 0 |
coqui_public_repos/STT-models/wolof/itml | coqui_public_repos/STT-models/wolof/itml/v0.1.0/alphabet.txt |
a
b
c
d
e
f
g
i
j
k
l
m
n
o
p
q
r
s
t
u
w
x
y
à
é
ë
ñ
ó
ŋ
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/test-nodejs_10x_8k-linux-amd64-opt.yml | build:
template_file: test-linux-opt-base.tyml
docker_image: "ubuntu:16.04"
dependencies:
- "linux-amd64-cpu-opt"
- "test-training_8k-linux-amd64-py36m-opt"
test_model_task: "test-training_8k-linux-amd64-py36m-opt"
system_setup:
>
${nodejs.packages_xenial.prep_10} && ${nodejs.packages_xenial.apt_pinning} && apt-get -qq update && apt-get -qq -y install ${nodejs.packages_xenial.apt}
args:
tests_cmdline: "${system.homedir.linux}/DeepSpeech/ds/taskcluster/tc-node-tests.sh 10.x 8k"
workerType: "${docker.dsTests}"
metadata:
name: "DeepSpeech Linux AMD64 CPU NodeJS 10.x tests (8kHz)"
description: "Testing DeepSpeech for Linux/AMD64 on NodeJS v10.x, CPU only, optimized version (8kHz)"
| 0 |
coqui_public_repos/STT/native_client/kenlm | coqui_public_repos/STT/native_client/kenlm/util/read_compressed_test.cc | #include "read_compressed.hh"
#include "file.hh"
#include "have.hh"
#define BOOST_TEST_MODULE ReadCompressedTest
#include <boost/test/unit_test.hpp>
#include <boost/scoped_ptr.hpp>
#include <fstream>
#include <string>
#include <cstdlib>
#if defined __MINGW32__
#include <ctime>
#include <fcntl.h>
#if !defined mkstemp
// Minimal mkstemp replacement for MinGW: mktemp() picks a unique name, then
// the file is opened/created separately.
// TODO insecure: unlike a real mkstemp, there is a race window between name
// selection and open() in which another process could create the file.
int mkstemp(char * stemplate)
{
    char *filename = mktemp(stemplate);
    if (filename == NULL)
        return -1;
    return open(filename, O_RDWR | O_CREAT, 0600);
}
#endif // defined
namespace util {
namespace {
// Keeps calling reader.Read until exactly `amount` bytes have been
// delivered into `to_void`; fails the test if a read returns zero early.
void ReadLoop(ReadCompressed &reader, void *to_void, std::size_t amount) {
  uint8_t *cursor = static_cast<uint8_t*>(to_void);
  std::size_t remaining = amount;
  while (remaining != 0) {
    const std::size_t got = reader.Read(cursor, remaining);
    BOOST_REQUIRE(got);
    cursor += got;
    remaining -= got;
  }
}
// Number of uint32 payload values (100000 bytes / sizeof(uint32_t)).
const uint32_t kSize4 = 100000 / 4;

// Writes the sequence 0,1,...,kSize4-1 as uint32s to a fresh temp file and
// returns its name; callers unlink it when done.
std::string WriteRandom() {
  char name[] = "tempXXXXXX";
  scoped_fd original(mkstemp(name));
  BOOST_REQUIRE(original.get() > 0);
  for (uint32_t i = 0; i < kSize4; ++i) {
    WriteOrThrow(original.get(), &i, sizeof(uint32_t));
  }
  return name;
}
// Reads the entire stream back and checks it is the 0..kSize4-1 sequence
// produced by WriteRandom, then checks that EOF is sticky (two consecutive
// zero-byte reads).
void VerifyRead(ReadCompressed &reader) {
  for (uint32_t i = 0; i < kSize4; ++i) {
    uint32_t got;
    ReadLoop(reader, &got, sizeof(uint32_t));
    BOOST_CHECK_EQUAL(i, got);
  }
  char ignored;
  BOOST_CHECK_EQUAL((std::size_t)0, reader.Read(&ignored, 1));
  // Test double EOF call.
  BOOST_CHECK_EQUAL((std::size_t)0, reader.Read(&ignored, 1));
}
// Round-trips the known payload through an external compressor: writes the
// raw file, shells out to `compressor` to produce a compressed copy, unlinks
// both paths (the compressed fd stays open via scoped_fd), and verifies that
// ReadCompressed transparently decompresses the data.
void TestRandom(const char *compressor) {
  std::string name(WriteRandom());

  char gzname[] = "tempXXXXXX";
  scoped_fd gzipped(mkstemp(gzname));

  std::string command(compressor);
#ifdef __CYGWIN__
  command += ".exe";
#endif
  // Paths come from mkstemp templates, so quoting here is for safety only.
  command += " <\"";
  command += name;
  command += "\" >\"";
  command += gzname;
  command += "\"";
  BOOST_REQUIRE_EQUAL(0, system(command.c_str()));

  BOOST_CHECK_EQUAL(0, unlink(name.c_str()));
  BOOST_CHECK_EQUAL(0, unlink(gzname));

  ReadCompressed reader(gzipped.release());
  VerifyRead(reader);
}
// Pass-through (no compression) path.
BOOST_AUTO_TEST_CASE(Uncompressed) {
  TestRandom("cat");
}

#ifdef HAVE_ZLIB
BOOST_AUTO_TEST_CASE(ReadGZ) {
  TestRandom("gzip");
}
#endif // HAVE_ZLIB

#ifdef HAVE_BZLIB
BOOST_AUTO_TEST_CASE(ReadBZ) {
  TestRandom("bzip2");
}
#endif // HAVE_BZLIB

#ifdef HAVE_XZLIB
BOOST_AUTO_TEST_CASE(ReadXZ) {
  TestRandom("xz");
}
#endif

#ifdef HAVE_ZLIB
// Placeholder: concatenated/appended gzip streams are not exercised yet.
BOOST_AUTO_TEST_CASE(AppendGZ) {
}
#endif

// istream input: per the header, istreams get no decompression support,
// so this only covers the uncompressed istream path.
BOOST_AUTO_TEST_CASE(IStream) {
  std::string name(WriteRandom());
  std::fstream stream(name.c_str(), std::ios::in);
  BOOST_CHECK_EQUAL(0, unlink(name.c_str()));
  ReadCompressed reader;
  reader.Reset(stream);
  VerifyRead(reader);
}
} // namespace
} // namespace util
| 0 |
coqui_public_repos | coqui_public_repos/STT/requirements_transcribe.txt | webrtcvad
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions/python/pywrapfst.pxd | # See www.openfst.org for extensive documentation on this weighted
# finite-state transducer library.
from libc.time cimport time
from libc.time cimport time_t
from libcpp cimport bool
from libcpp.memory cimport shared_ptr
from libcpp.memory cimport unique_ptr
from libcpp.utility cimport pair
from libcpp.vector cimport vector
from libcpp.string cimport string
from basictypes cimport int32
from basictypes cimport int64
from basictypes cimport uint32
from basictypes cimport uint64
cimport fst as fst
from ios cimport stringstream
# Exportable helper functions.
# String-conversion helpers and option lookups that translate Python-level
# strings into the corresponding C++ enum values; all raise (except *) on
# invalid input.
cdef string tostring(data, encoding=?) except *

cdef string weight_tostring(data, encoding=?) except *

cdef fst.ComposeFilter _get_compose_filter(
    const string &compose_filter) except *

cdef fst.DeterminizeType _get_determinize_type(const string &det_type) except *

cdef fst.QueueType _get_queue_type(const string &queue_type) except *

cdef fst.RandArcSelection _get_rand_arc_selection(
    const string &replace_label_type) except *

cdef fst.ReplaceLabelType _get_replace_label_type(
    const string &replace_label_type, bool epsilon_on_replace) except *


# Weight.
# Parse a weight string for the given weight type, defaulting to semiring
# One/Zero when the string is empty/None.
cdef fst.WeightClass _get_WeightClass_or_One(const string &weight_type,
                                             weight_string) except *

cdef fst.WeightClass _get_WeightClass_or_Zero(const string &weight_type,
                                              weight_string) except *
# Python wrapper around a C++ WeightClass instance.
cdef class Weight(object):

  # Owned C++ weight.
  cdef unique_ptr[fst.WeightClass] _weight

  cdef void _check_weight(self) except *

  cpdef Weight copy(self)

  cpdef string to_string(self)

  cpdef string type(self)


# Constructors for the distinguished weights of a given weight type.
cdef Weight _Zero(weight_type)

cdef Weight _One(weight_type)

cdef Weight _NoWeight(weight_type)


# Semiring operations on Weight wrappers.
cdef Weight _plus(Weight lhs, Weight rhs)

cdef Weight _times(Weight lhs, Weight rhs)

cdef Weight _divide(Weight lhs, Weight rhs)

cdef Weight _power(Weight lhs, size_t n)
# SymbolTable.
ctypedef fst.SymbolTable * SymbolTable_ptr
cdef class _SymbolTable(object):
cdef fst.SymbolTable *_table
cpdef int64 available_key(self)
cpdef bytes checksum(self)
cpdef SymbolTable copy(self)
cpdef int64 get_nth_key(self, ssize_t pos) except *
cpdef bytes labeled_checksum(self)
cpdef bool member(self, key)
cpdef string name(self)
cpdef size_t num_symbols(self)
cpdef void write(self, filename) except *
cpdef void write_text(self, filename) except *
cdef class _EncodeMapperSymbolTable(_SymbolTable):
cdef shared_ptr[fst.EncodeMapperClass] _encoder
cdef class _FstSymbolTable(_SymbolTable):
cdef shared_ptr[fst.FstClass] _fst
cdef class _MutableSymbolTable(_SymbolTable):
cpdef int64 add_symbol(self, symbol, int64 key=?)
cpdef void add_table(self, _SymbolTable syms)
cpdef void set_name(self, new_name) except *
cdef class _MutableFstSymbolTable(_MutableSymbolTable):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef class SymbolTable(_MutableSymbolTable):
cdef unique_ptr[fst.SymbolTable] _smart_table
cdef _EncodeMapperSymbolTable _init_EncodeMapperSymbolTable(
fst.SymbolTable *table, shared_ptr[fst.EncodeMapperClass] encoder)
cdef _FstSymbolTable _init_FstSymbolTable(fst.SymbolTable *table,
shared_ptr[fst.FstClass] ifst)
cdef _MutableFstSymbolTable _init_MutableFstSymbolTable(fst.SymbolTable *table,
shared_ptr[fst.MutableFstClass] ifst)
cdef SymbolTable _init_SymbolTable(fst.SymbolTable *table)
# Iterator over (symbol, value) pairs of a SymbolTable; holds a shared_ptr
# to keep the underlying table alive for the iterator's lifetime.
cdef class SymbolTableIterator(object):

  cdef shared_ptr[fst.SymbolTable] _table
  cdef unique_ptr[fst.SymbolTableIterator] _siter

  cpdef bool done(self)

  cpdef void next(self)

  cpdef void reset(self)

  cpdef string symbol(self)

  cpdef int64 value(self)
# EncodeMapper.
# Python wrapper around a C++ EncodeMapperClass, which encodes/decodes
# arc labels and/or weights.
cdef class EncodeMapper(object):

  cdef shared_ptr[fst.EncodeMapperClass] _encoder

  cpdef string arc_type(self)

  cpdef uint32 flags(self)

  cpdef _EncodeMapperSymbolTable input_symbols(self)

  cpdef _EncodeMapperSymbolTable output_symbols(self)

  cpdef uint64 properties(self, uint64 mask)

  cpdef void set_input_symbols(self, _SymbolTable syms) except *

  cpdef void set_output_symbols(self, _SymbolTable syms) except *

  cpdef string weight_type(self)
# Fst.
ctypedef fst.FstClass * FstClass_ptr
ctypedef fst.MutableFstClass * MutableFstClass_ptr
ctypedef fst.VectorFstClass * VectorFstClass_ptr
cdef class _Fst(object):
cdef shared_ptr[fst.FstClass] _fst
cpdef string arc_type(self)
cpdef ArcIterator arcs(self, int64 state)
cpdef _Fst copy(self)
cpdef void draw(self, filename, _SymbolTable isymbols=?,
_SymbolTable osymbols=?, SymbolTable ssymbols=?,
bool acceptor=?, title=?, double width=?,
double height=?, bool portrait=?, bool vertical=?,
double ranksep=?, double nodesep=?, int32 fontsize=?,
int32 precision=?, float_format=?,
bool show_weight_one=?)
cpdef Weight final(self, int64 state)
cpdef string fst_type(self)
cpdef _FstSymbolTable input_symbols(self)
cpdef size_t num_arcs(self, int64 state) except *
cpdef size_t num_input_epsilons(self, int64 state) except *
cpdef size_t num_output_epsilons(self, int64 state) except *
cpdef _FstSymbolTable output_symbols(self)
cpdef uint64 properties(self, uint64 mask, bool test)
cpdef int64 start(self)
cpdef StateIterator states(self)
cpdef string text(self, _SymbolTable isymbols=?, _SymbolTable osymbols=?,
_SymbolTable ssymbols=?, bool acceptor=?,
bool show_weight_one=?, missing_sym=?)
cpdef bool verify(self)
cpdef string weight_type(self)
cpdef void write(self, filename) except *
cpdef bytes write_to_string(self)
cdef class _MutableFst(_Fst):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef void _check_mutating_imethod(self) except *
cdef void _add_arc(self, int64 state, Arc arc) except *
cpdef int64 add_state(self) except *
cdef void _arcsort(self, sort_type=?) except *
cdef void _closure(self, bool closure_plus=?) except *
cdef void _concat(self, _Fst ifst) except *
cdef void _connect(self) except *
cdef void _decode(self, EncodeMapper) except *
cdef void _delete_arcs(self, int64 state, size_t n=?) except *
cdef void _delete_states(self, states=?) except *
cdef void _encode(self, EncodeMapper) except *
cdef void _invert(self) except *
cdef void _minimize(self, float delta=?, bool allow_nondet=?) except *
cpdef MutableArcIterator mutable_arcs(self, int64 state)
cpdef int64 num_states(self)
cdef void _project(self, bool project_output=?) except *
cdef void _prune(self, float delta=?, int64 nstate=?, weight=?) except *
cdef void _push(self, float delta=?, bool remove_total_weight=?,
bool to_final=?) except *
cdef void _relabel_pairs(self, ipairs=?, opairs=?) except *
cdef void _relabel_tables(self, _SymbolTable old_isymbols=?,
_SymbolTable new_isymbols=?, unknown_isymbol=?,
bool attach_new_isymbols=?,
_SymbolTable old_osymbols=?, _SymbolTable new_osymbols=?,
unknown_osymbol=?, bool attach_new_osymbols=?) except *
cdef void _reserve_arcs(self, int64 state, size_t n) except *
cdef void _reserve_states(self, int64 n) except *
cdef void _reweight(self, potentials, bool to_final=?) except *
cdef void _rmepsilon(self, queue_type=?, bool connect=?, weight=?,
int64 nstate=?, float delta=?) except *
cdef void _set_final(self, int64 state, weight=?) except *
cdef void _set_properties(self, uint64 props, uint64 mask)
cdef void _set_start(self, int64 state) except *
cdef void _set_input_symbols(self, _SymbolTable syms) except *
cdef void _set_output_symbols(self, _SymbolTable syms) except *
cdef void _topsort(self) except *
cdef void _union(self, _Fst ifst) except *
# Fst construction helpers.
cdef _Fst _init_Fst(FstClass_ptr tfst)
cdef _MutableFst _init_MutableFst(MutableFstClass_ptr tfst)
cdef _Fst _init_XFst(FstClass_ptr tfst)
cdef _MutableFst _create_Fst(arc_type=?)
cpdef _Fst _read(filename)
cpdef _Fst _read_from_string(state)
# Iterators.
cdef class Arc(object):
cdef unique_ptr[fst.ArcClass] _arc
cpdef Arc copy(self)
cdef Arc _init_Arc(const fst.ArcClass &arc)
cdef class ArcIterator(object):
cdef shared_ptr[fst.FstClass] _fst
cdef unique_ptr[fst.ArcIteratorClass] _aiter
cpdef bool done(self)
cpdef uint32 flags(self)
cpdef void next(self)
cpdef size_t position(self)
cpdef void reset(self)
cpdef void seek(self, size_t a)
cpdef void set_flags(self, uint32 flags, uint32 mask)
cpdef object value(self)
cdef class MutableArcIterator(object):
cdef shared_ptr[fst.MutableFstClass] _mfst
cdef unique_ptr[fst.MutableArcIteratorClass] _aiter
cpdef bool done(self)
cpdef uint32 flags(self)
cpdef void next(self)
cpdef size_t position(self)
cpdef void reset(self)
cpdef void seek(self, size_t a)
cpdef void set_flags(self, uint32 flags, uint32 mask)
cpdef void set_value(self, Arc arc)
cpdef object value(self)
cdef class StateIterator(object):
cdef shared_ptr[fst.FstClass] _fst
cdef unique_ptr[fst.StateIteratorClass] _siter
cpdef bool done(self)
cpdef void next(self)
cpdef void reset(self)
cpdef int64 value(self)
# Constructive operations on Fst.
cdef _Fst _map(_Fst ifst, float delta=?, map_type=?, double power=?, weight=?)
cpdef _Fst arcmap(_Fst ifst, float delta=?, map_type=?, double power=?,
weight=?)
cpdef _MutableFst compose(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef _Fst convert(_Fst ifst, fst_type=?)
cpdef _MutableFst determinize(_Fst ifst, float delta=?, det_type=?,
int64 nstate=?, int64 subsequential_label=?,
weight=?, bool increment_subsequential_label=?)
cpdef _MutableFst difference(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef _MutableFst disambiguate(_Fst ifst, float delta=?, int64 nstate=?,
int64 subsequential_label=?, weight=?)
cpdef _MutableFst epsnormalize(_Fst ifst, bool eps_norm_output=?)
cpdef bool equal(_Fst ifst1, _Fst ifst2, float delta=?)
cpdef bool equivalent(_Fst ifst1, _Fst ifst2, float delta=?) except *
cpdef _MutableFst intersect(_Fst ifst1, _Fst ifst2, compose_filter=?,
bool connect=?)
cpdef bool isomorphic(_Fst ifst1, _Fst ifst2, float delta=?)
cpdef _MutableFst prune(_Fst ifst, float delta=?, int64 nstate=?,
weight=?)
cpdef _MutableFst push(_Fst ifst, float delta=?, bool push_weights=?,
bool push_labels=?, bool remove_common_affix=?,
bool remove_total_weight=?, bool to_final=?)
cpdef bool randequivalent(_Fst ifst1, _Fst ifst2, int32 npath=?,
float delta=?, time_t seed=?, select=?,
int32 max_length=?) except *
cpdef _MutableFst randgen(_Fst ifst, int32 npath=?, time_t seed=?,
select=?, int32 max_length=?,
bool remove_total_weight=?, bool weighted=?)
cdef fst.ReplaceLabelType _get_replace_label_type(string rlt,
bool epsilon_on_replace) except *
cpdef _MutableFst replace(pairs, call_arc_labeling=?, return_arc_labeling=?,
bool epsilon_on_replace=?, int64 return_label=?)
cpdef _MutableFst reverse(_Fst ifst, bool require_superinitial=?)
cdef vector[fst.WeightClass] *_shortestdistance(_Fst ifst, float delta=?,
int64 nstate=?, queue_type=?,
bool reverse=?) except *
cpdef _MutableFst shortestpath(_Fst ifst, float delta=?, int32 nshortest=?,
int64 nstate=?, queue_type=?, bool unique=?,
weight=?)
cpdef _Fst statemap(_Fst ifst, map_type)
cpdef _MutableFst synchronize(_Fst ifst)
# Compiler.
cdef class Compiler(object):
cdef unique_ptr[stringstream] _sstrm
cdef string _fst_type
cdef string _arc_type
cdef const fst.SymbolTable *_isymbols
cdef const fst.SymbolTable *_osymbols
cdef const fst.SymbolTable *_ssymbols
cdef bool _acceptor
cdef bool _keep_isymbols
cdef bool _keep_osymbols
cdef bool _keep_state_numbering
cdef bool _allow_negative_labels
cpdef _Fst compile(self)
cpdef void write(self, expression)
# FarReader.
cdef class FarReader(object):
cdef unique_ptr[fst.FarReaderClass] _reader
cpdef string arc_type(self)
cpdef bool done(self)
cpdef bool error(self)
cpdef string far_type(self)
cpdef bool find(self, key) except *
cpdef _Fst get_fst(self)
cpdef string get_key(self)
cpdef void next(self)
cpdef void reset(self)
# FarWriter.
cdef class FarWriter(object):
cdef unique_ptr[fst.FarWriterClass] _writer
cpdef string arc_type(self)
cdef void close(self)
cpdef void add(self, key, _Fst ifst) except *
cpdef bool error(self)
cpdef string far_type(self)
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions/special/sigma-fst.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_EXTENSIONS_SPECIAL_SIGMA_FST_H_
#define FST_EXTENSIONS_SPECIAL_SIGMA_FST_H_
#include <memory>
#include <string>
#include <fst/const-fst.h>
#include <fst/matcher-fst.h>
#include <fst/matcher.h>
DECLARE_int64_t(sigma_fst_sigma_label);
DECLARE_string(sigma_fst_rewrite_mode);
namespace fst {
namespace internal {
template <class Label>
class SigmaFstMatcherData {
public:
explicit SigmaFstMatcherData(Label sigma_label = FLAGS_sigma_fst_sigma_label,
MatcherRewriteMode rewrite_mode =
RewriteMode(FLAGS_sigma_fst_rewrite_mode))
: sigma_label_(sigma_label), rewrite_mode_(rewrite_mode) {}
SigmaFstMatcherData(const SigmaFstMatcherData &data)
: sigma_label_(data.sigma_label_), rewrite_mode_(data.rewrite_mode_) {}
static SigmaFstMatcherData<Label> *Read(std::istream &istrm,
const FstReadOptions &read) {
auto *data = new SigmaFstMatcherData<Label>();
ReadType(istrm, &data->sigma_label_);
int32_t rewrite_mode;
ReadType(istrm, &rewrite_mode);
data->rewrite_mode_ = static_cast<MatcherRewriteMode>(rewrite_mode);
return data;
}
bool Write(std::ostream &ostrm, const FstWriteOptions &opts) const {
WriteType(ostrm, sigma_label_);
WriteType(ostrm, static_cast<int32_t>(rewrite_mode_));
return !ostrm ? false : true;
}
Label SigmaLabel() const { return sigma_label_; }
MatcherRewriteMode RewriteMode() const { return rewrite_mode_; }
private:
static MatcherRewriteMode RewriteMode(const string &mode) {
if (mode == "auto") return MATCHER_REWRITE_AUTO;
if (mode == "always") return MATCHER_REWRITE_ALWAYS;
if (mode == "never") return MATCHER_REWRITE_NEVER;
LOG(WARNING) << "SigmaFst: Unknown rewrite mode: " << mode << ". "
<< "Defaulting to auto.";
return MATCHER_REWRITE_AUTO;
}
Label sigma_label_;
MatcherRewriteMode rewrite_mode_;
};
} // namespace internal
constexpr uint8_t kSigmaFstMatchInput = 0x01; // Input matcher is SigmaMatcher.
constexpr uint8_t kSigmaFstMatchOutput = 0x02; // Output matcher is SigmaMatcher.
template <class M, uint8_t flags = kSigmaFstMatchInput | kSigmaFstMatchOutput>
class SigmaFstMatcher : public SigmaMatcher<M> {
public:
using FST = typename M::FST;
using Arc = typename M::Arc;
using StateId = typename Arc::StateId;
using Label = typename Arc::Label;
using Weight = typename Arc::Weight;
using MatcherData = internal::SigmaFstMatcherData<Label>;
enum : uint8_t { kFlags = flags };
// This makes a copy of the FST.
SigmaFstMatcher(
const FST &fst, MatchType match_type,
std::shared_ptr<MatcherData> data = std::make_shared<MatcherData>())
: SigmaMatcher<M>(
fst, match_type,
SigmaLabel(match_type,
data ? data->SigmaLabel() : MatcherData().SigmaLabel()),
data ? data->RewriteMode() : MatcherData().RewriteMode()),
data_(data) {}
// This doesn't copy the FST.
SigmaFstMatcher(
const FST *fst, MatchType match_type,
std::shared_ptr<MatcherData> data = std::make_shared<MatcherData>())
: SigmaMatcher<M>(
fst, match_type,
SigmaLabel(match_type,
data ? data->SigmaLabel() : MatcherData().SigmaLabel()),
data ? data->RewriteMode() : MatcherData().RewriteMode()),
data_(data) {}
// This makes a copy of the FST.
SigmaFstMatcher(const SigmaFstMatcher<M, flags> &matcher, bool safe = false)
: SigmaMatcher<M>(matcher, safe), data_(matcher.data_) {}
SigmaFstMatcher<M, flags> *Copy(bool safe = false) const override {
return new SigmaFstMatcher<M, flags>(*this, safe);
}
const MatcherData *GetData() const { return data_.get(); }
std::shared_ptr<MatcherData> GetSharedData() const { return data_; }
private:
static Label SigmaLabel(MatchType match_type, Label label) {
if (match_type == MATCH_INPUT && flags & kSigmaFstMatchInput) return label;
if (match_type == MATCH_OUTPUT && flags & kSigmaFstMatchOutput)
return label;
return kNoLabel;
}
std::shared_ptr<MatcherData> data_;
};
extern const char sigma_fst_type[];
extern const char input_sigma_fst_type[];
extern const char output_sigma_fst_type[];
using StdSigmaFst = MatcherFst<ConstFst<StdArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<StdArc>>>,
sigma_fst_type>;
using LogSigmaFst = MatcherFst<ConstFst<LogArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<LogArc>>>,
sigma_fst_type>;
using Log64SigmaFst =
MatcherFst<ConstFst<Log64Arc>,
SigmaFstMatcher<SortedMatcher<ConstFst<Log64Arc>>>,
input_sigma_fst_type>;
using StdInputSigmaFst = MatcherFst<
ConstFst<StdArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<StdArc>>, kSigmaFstMatchInput>,
input_sigma_fst_type>;
using LogInputSigmaFst = MatcherFst<
ConstFst<LogArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<LogArc>>, kSigmaFstMatchInput>,
input_sigma_fst_type>;
using Log64InputSigmaFst = MatcherFst<
ConstFst<Log64Arc>,
SigmaFstMatcher<SortedMatcher<ConstFst<Log64Arc>>, kSigmaFstMatchInput>,
input_sigma_fst_type>;
using StdOutputSigmaFst = MatcherFst<
ConstFst<StdArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<StdArc>>, kSigmaFstMatchOutput>,
output_sigma_fst_type>;
using LogOutputSigmaFst = MatcherFst<
ConstFst<LogArc>,
SigmaFstMatcher<SortedMatcher<ConstFst<LogArc>>, kSigmaFstMatchOutput>,
output_sigma_fst_type>;
using Log64OutputSigmaFst = MatcherFst<
ConstFst<Log64Arc>,
SigmaFstMatcher<SortedMatcher<ConstFst<Log64Arc>>, kSigmaFstMatchOutput>,
output_sigma_fst_type>;
} // namespace fst
#endif // FST_EXTENSIONS_SPECIAL_SIGMA_FST_H_
| 0 |
coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core | coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core/common/make_string.h | /**
* Copyright (c) 2016-present, Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Portions Copyright (c) Microsoft Corporation
#pragma once
#include <locale>
#include <sstream>
#include <type_traits>
namespace onnxruntime {
namespace detail {
inline void MakeStringImpl(std::ostringstream& /*ss*/) noexcept {
}
template <typename T>
inline void MakeStringImpl(std::ostringstream& ss, const T& t) noexcept {
ss << t;
}
template <typename T, typename... Args>
inline void MakeStringImpl(std::ostringstream& ss, const T& t, const Args&... args) noexcept {
MakeStringImpl(ss, t);
MakeStringImpl(ss, args...);
}
// see MakeString comments for explanation of why this is necessary
template <typename... Args>
inline std::string MakeStringImpl(const Args&... args) noexcept {
std::ostringstream ss;
MakeStringImpl(ss, args...);
return ss.str();
}
//
// Infrastructure to convert char[n] to char* to reduce binary size
//
// default is to leave the type as is
template <class T>
struct if_char_array_make_ptr {
using type = T;
};
// specialization that matches an array reference, which is what the char array from a string literal
// used in a call to MakeString will be.
// if the type is a char[n] array we 'decay' it to a char* so that the usages can be folded.
template <class T, size_t N>
struct if_char_array_make_ptr<T (&)[N]> {
// remove a single extent (T[x] -> T, but T[x][y] -> T[y]) so we only match char[x],
// and get the type name without the 'const' so both 'const char (&)[n]' and 'char (&)[n]' are matched.
using element_type = typename std::remove_const<typename std::remove_extent<T>::type>::type;
using type = typename std::conditional<std::is_same<char, element_type>::value, T*, T (&)[N]>::type;
};
// helper to make usage simpler in MakeString
template <class T>
using if_char_array_make_ptr_t = typename if_char_array_make_ptr<T>::type;
} // namespace detail
/**
* Makes a string by concatenating string representations of the arguments.
* This version uses the current locale.
*/
template <typename... Args>
std::string MakeString(const Args&... args) {
// We need to update the types from the MakeString template instantiation to decay any char[n] to char*.
// e.g. MakeString("in", "out") goes from MakeString<char[2], char[3]> to MakeStringImpl<char*, char*>
// so that MakeString("out", "in") will also match MakeStringImpl<char*, char*> instead of requiring
// MakeStringImpl<char[3], char[2]>.
//
// We have to do the type processing before any actual work, so this function purely implements the type processing.
// If we do not do it this way we do not get the full binary size reduction.
//
// See https://stackoverflow.com/a/29418212/684911 for overall details of the approach, but note it does not cover
// the need to do the type processing as a separate step.
return detail::MakeStringImpl(detail::if_char_array_make_ptr_t<Args const&>(args)...);
}
/**
* Makes a string by concatenating string representations of the arguments.
* This version uses std::locale::classic().
*/
template <typename... Args>
std::string MakeStringWithClassicLocale(const Args&... args) {
std::ostringstream ss;
ss.imbue(std::locale::classic());
detail::MakeStringImpl(ss, args...);
return ss.str();
}
// MakeString versions for already-a-string types.
inline std::string MakeString(const std::string& str) {
return str;
}
inline std::string MakeString(const char* cstr) {
return cstr;
}
inline std::string MakeStringWithClassicLocale(const std::string& str) {
return str;
}
inline std::string MakeStringWithClassicLocale(const char* cstr) {
return cstr;
}
} // namespace onnxruntime
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/test-nodejs_10x-win-amd64-opt.yml | build:
template_file: test-win-opt-base.tyml
dependencies:
- "win-amd64-cpu-opt"
- "test-training_16k-linux-amd64-py36m-opt"
test_model_task: "test-training_16k-linux-amd64-py36m-opt"
system_setup:
>
${system.sox_win} && ${nodejs.win.prep_10}
args:
tests_cmdline: "${system.homedir.win}/DeepSpeech/ds/taskcluster/tc-node-tests.sh 10.x 16k"
metadata:
name: "DeepSpeech Windows AMD64 CPU NodeJS 10.x tests"
description: "Testing DeepSpeech for Windows/AMD64 on NodeJS v10.x, CPU only, optimized version"
| 0 |
coqui_public_repos/STT-examples/android_mic_streaming/app/src/main/res | coqui_public_repos/STT-examples/android_mic_streaming/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml | <?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon> | 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/bin/fstsynchronize.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
int fstsynchronize_main(int argc, char **argv);
int main(int argc, char **argv) { return fstsynchronize_main(argc, argv); }
| 0 |
coqui_public_repos/STT-models/dhivehi/itml | coqui_public_repos/STT-models/dhivehi/itml/v0.1.1/LICENSE | GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst | coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/script/synchronize.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_SCRIPT_SYNCHRONIZE_H_
#define FST_SCRIPT_SYNCHRONIZE_H_
#include <utility>
#include <fst/synchronize.h>
#include <fst/script/fst-class.h>
namespace fst {
namespace script {
using SynchronizeArgs = std::pair<const FstClass &, MutableFstClass *>;
// Scripting-layer bridge: instantiates fst::Synchronize for a concrete Arc
// type. Unpacks the (input FST, output mutable FST) argument pair supplied
// by the type-erased script API and forwards to the templated algorithm.
template <class Arc>
void Synchronize(SynchronizeArgs *args) {
  const Fst<Arc> &input_fst = *(args->first.GetFst<Arc>());
  MutableFst<Arc> *output_fst = args->second->GetMutableFst<Arc>();
  Synchronize(input_fst, output_fst);
}
void Synchronize(const FstClass &ifst, MutableFstClass *ofst);
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_SYNCHRONIZE_H_
| 0 |
coqui_public_repos/STT/native_client/kenlm/util | coqui_public_repos/STT/native_client/kenlm/util/double-conversion/CMakeLists.txt | # This CMake file was created by Lane Schwartz <[email protected]>
# Explicitly list the source files for this subdirectory
#
# If you add any source files to this subdirectory
# that should be included in the kenlm library,
# (this excludes any unit test files)
# you should add them to the following list:
#
# In order to allow CMake files in the parent directory
# to see this variable definition, we set PARENT_SCOPE.
#
# In order to set correct paths to these files
# when this variable is referenced by CMake files in the parent directory,
# we prefix all files with ${CMAKE_CURRENT_SOURCE_DIR}.
#
# NOTE: these are the double-conversion library's .cc files vendored under
# util/double-conversion; the matching headers are picked up implicitly.
set(KENLM_UTIL_DOUBLECONVERSION_SOURCE
${CMAKE_CURRENT_SOURCE_DIR}/bignum-dtoa.cc
${CMAKE_CURRENT_SOURCE_DIR}/bignum.cc
${CMAKE_CURRENT_SOURCE_DIR}/cached-powers.cc
${CMAKE_CURRENT_SOURCE_DIR}/fast-dtoa.cc
${CMAKE_CURRENT_SOURCE_DIR}/fixed-dtoa.cc
${CMAKE_CURRENT_SOURCE_DIR}/strtod.cc
${CMAKE_CURRENT_SOURCE_DIR}/double-to-string.cc
${CMAKE_CURRENT_SOURCE_DIR}/string-to-double.cc
# PARENT_SCOPE makes the variable visible to the parent directory's
# CMakeLists.txt, which aggregates it into the kenlm library target.
PARENT_SCOPE)
| 0 |
coqui_public_repos | coqui_public_repos/coqui-py/.readthedocs.yaml | # .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
sphinx:
configuration: docs/source/conf.py
python:
install:
- method: pip
path: .
extra_requirements:
- docs
| 0 |
coqui_public_repos/inference-engine/third_party/cereal/include/cereal/external/rapidjson | coqui_public_repos/inference-engine/third_party/cereal/include/cereal/external/rapidjson/internal/stack.h | // Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef CEREAL_RAPIDJSON_INTERNAL_STACK_H_
#define CEREAL_RAPIDJSON_INTERNAL_STACK_H_
#include "../allocators.h"
#include "swap.h"
#include <cstddef>
#if defined(__clang__)
CEREAL_RAPIDJSON_DIAG_PUSH
CEREAL_RAPIDJSON_DIAG_OFF(c++98-compat)
#endif
CEREAL_RAPIDJSON_NAMESPACE_BEGIN
namespace internal {
///////////////////////////////////////////////////////////////////////////////
// Stack
//! A type-unsafe stack for storing different types of data.
/*! \tparam Allocator Allocator for allocating stack memory.
*/
template <typename Allocator>
class Stack {
public:
// Optimization note: Do not allocate memory for stack_ in constructor.
// Do it lazily when first Push() -> Expand() -> Resize().
Stack(Allocator* allocator, size_t stackCapacity) : allocator_(allocator), ownAllocator_(0), stack_(0), stackTop_(0), stackEnd_(0), initialCapacity_(stackCapacity) {
}
#if CEREAL_RAPIDJSON_HAS_CXX11_RVALUE_REFS
// Move constructor: steals rhs's buffer/allocator and leaves rhs zeroed
// so that rhs's destructor is a no-op on the transferred resources.
Stack(Stack&& rhs)
: allocator_(rhs.allocator_),
ownAllocator_(rhs.ownAllocator_),
stack_(rhs.stack_),
stackTop_(rhs.stackTop_),
stackEnd_(rhs.stackEnd_),
initialCapacity_(rhs.initialCapacity_)
{
rhs.allocator_ = 0;
rhs.ownAllocator_ = 0;
rhs.stack_ = 0;
rhs.stackTop_ = 0;
rhs.stackEnd_ = 0;
rhs.initialCapacity_ = 0;
}
#endif
// Frees the buffer and, if owned, the allocator (see Destroy()).
~Stack() {
Destroy();
}
#if CEREAL_RAPIDJSON_HAS_CXX11_RVALUE_REFS
// Move assignment: releases current resources first, then takes over rhs's.
Stack& operator=(Stack&& rhs) {
if (&rhs != this)
{
Destroy();
allocator_ = rhs.allocator_;
ownAllocator_ = rhs.ownAllocator_;
stack_ = rhs.stack_;
stackTop_ = rhs.stackTop_;
stackEnd_ = rhs.stackEnd_;
initialCapacity_ = rhs.initialCapacity_;
rhs.allocator_ = 0;
rhs.ownAllocator_ = 0;
rhs.stack_ = 0;
rhs.stackTop_ = 0;
rhs.stackEnd_ = 0;
rhs.initialCapacity_ = 0;
}
return *this;
}
#endif
// Constant-time member-wise swap; never allocates or throws.
void Swap(Stack& rhs) CEREAL_RAPIDJSON_NOEXCEPT {
internal::Swap(allocator_, rhs.allocator_);
internal::Swap(ownAllocator_, rhs.ownAllocator_);
internal::Swap(stack_, rhs.stack_);
internal::Swap(stackTop_, rhs.stackTop_);
internal::Swap(stackEnd_, rhs.stackEnd_);
internal::Swap(initialCapacity_, rhs.initialCapacity_);
}
// Logically empties the stack; capacity is kept for reuse.
void Clear() { stackTop_ = stack_; }
void ShrinkToFit() {
if (Empty()) {
// If the stack is empty, completely deallocate the memory.
Allocator::Free(stack_); // NOLINT (+clang-analyzer-unix.Malloc)
stack_ = 0;
stackTop_ = 0;
stackEnd_ = 0;
}
else
Resize(GetSize());
}
// Optimization note: try to minimize the size of this function for force inline.
// Expansion is run very infrequently, so it is moved to another (probably non-inline) function.
template<typename T>
CEREAL_RAPIDJSON_FORCEINLINE void Reserve(size_t count = 1) {
// Expand the stack if needed
if (CEREAL_RAPIDJSON_UNLIKELY(static_cast<std::ptrdiff_t>(sizeof(T) * count) > (stackEnd_ - stackTop_)))
Expand<T>(count);
}
// Reserves room for `count` Ts and bumps the top; returns raw storage for
// the caller to construct into. Pointers may be invalidated by later growth.
template<typename T>
CEREAL_RAPIDJSON_FORCEINLINE T* Push(size_t count = 1) {
Reserve<T>(count);
return PushUnsafe<T>(count);
}
// Precondition: the caller has already Reserve()d enough space.
template<typename T>
CEREAL_RAPIDJSON_FORCEINLINE T* PushUnsafe(size_t count = 1) {
CEREAL_RAPIDJSON_ASSERT(stackTop_);
CEREAL_RAPIDJSON_ASSERT(static_cast<std::ptrdiff_t>(sizeof(T) * count) <= (stackEnd_ - stackTop_));
T* ret = reinterpret_cast<T*>(stackTop_);
stackTop_ += sizeof(T) * count;
return ret;
}
// Pops `count` Ts and returns a pointer to the popped region; the data is
// still readable there until the next Push/Resize overwrites or moves it.
template<typename T>
T* Pop(size_t count) {
CEREAL_RAPIDJSON_ASSERT(GetSize() >= count * sizeof(T));
stackTop_ -= count * sizeof(T);
return reinterpret_cast<T*>(stackTop_);
}
// Pointer to the topmost T; does not change the stack.
template<typename T>
T* Top() {
CEREAL_RAPIDJSON_ASSERT(GetSize() >= sizeof(T));
return reinterpret_cast<T*>(stackTop_ - sizeof(T));
}
template<typename T>
const T* Top() const {
CEREAL_RAPIDJSON_ASSERT(GetSize() >= sizeof(T));
return reinterpret_cast<T*>(stackTop_ - sizeof(T));
}
// One-past-the-top pointer (the next Push<T> slot), reinterpreted as T*.
template<typename T>
T* End() { return reinterpret_cast<T*>(stackTop_); }
template<typename T>
const T* End() const { return reinterpret_cast<T*>(stackTop_); }
// Base of the buffer, reinterpreted as T*.
template<typename T>
T* Bottom() { return reinterpret_cast<T*>(stack_); }
template<typename T>
const T* Bottom() const { return reinterpret_cast<T*>(stack_); }
bool HasAllocator() const {
return allocator_ != 0;
}
Allocator& GetAllocator() {
CEREAL_RAPIDJSON_ASSERT(allocator_);
return *allocator_;
}
// Sizes below are in bytes, not element counts.
bool Empty() const { return stackTop_ == stack_; }
size_t GetSize() const { return static_cast<size_t>(stackTop_ - stack_); }
size_t GetCapacity() const { return static_cast<size_t>(stackEnd_ - stack_); }
private:
template<typename T>
void Expand(size_t count) {
// Only expand the capacity if the current stack exists. Otherwise just create a stack with initial capacity.
size_t newCapacity;
if (stack_ == 0) {
if (!allocator_)
ownAllocator_ = allocator_ = CEREAL_RAPIDJSON_NEW(Allocator)();
newCapacity = initialCapacity_;
} else {
// Grow by ~1.5x per expansion.
newCapacity = GetCapacity();
newCapacity += (newCapacity + 1) / 2;
}
size_t newSize = GetSize() + sizeof(T) * count;
if (newCapacity < newSize)
newCapacity = newSize;
Resize(newCapacity);
}
// Reallocates the buffer to newCapacity bytes; the used prefix is preserved
// but the buffer may move, invalidating previously returned pointers.
void Resize(size_t newCapacity) {
const size_t size = GetSize(); // Backup the current size
stack_ = static_cast<char*>(allocator_->Realloc(stack_, GetCapacity(), newCapacity));
stackTop_ = stack_ + size;
stackEnd_ = stack_ + newCapacity;
}
void Destroy() {
Allocator::Free(stack_);
CEREAL_RAPIDJSON_DELETE(ownAllocator_); // Only delete if it is owned by the stack
}
// Prohibit copy constructor & assignment operator.
Stack(const Stack&);
Stack& operator=(const Stack&);
Allocator* allocator_;      // current allocator; may equal ownAllocator_
Allocator* ownAllocator_;   // non-null only if lazily created in Expand()
char *stack_;               // buffer base (null until first Push)
char *stackTop_;            // one past the last used byte
char *stackEnd_;            // one past the end of the allocation
size_t initialCapacity_;    // capacity used for the first lazy allocation
};
} // namespace internal
CEREAL_RAPIDJSON_NAMESPACE_END
#if defined(__clang__)
CEREAL_RAPIDJSON_DIAG_POP
#endif
#endif // CEREAL_RAPIDJSON_STACK_H_
| 0 |
coqui_public_repos/TTS/tests | coqui_public_repos/TTS/tests/inputs/test_tacotron_bd_config.json | {
"model": "Tacotron",
"run_name": "test_sample_dataset_run",
"run_description": "sample dataset test run",
// AUDIO PARAMETERS
"audio":{
// stft parameters
"fft_size": 1024, // number of stft frequency levels. Size of the linear spectogram frame.
"win_length": 1024, // stft window length in ms.
"hop_length": 256, // stft window hop-lengh in ms.
"frame_length_ms": null, // stft window length in ms.If null, 'win_length' is used.
"frame_shift_ms": null, // stft window hop-lengh in ms. If null, 'hop_length' is used.
// Audio processing parameters
"sample_rate": 22050, // DATASET-RELATED: wav sample-rate.
"preemphasis": 0.0, // pre-emphasis to reduce spec noise and make it more structured. If 0.0, no -pre-emphasis.
"ref_level_db": 20, // reference level db, theoretically 20db is the sound of air.
// Silence trimming
"do_trim_silence": true,// enable trimming of slience of audio as you load it. LJspeech (true), TWEB (false), Nancy (true)
"trim_db": 60, // threshold for timming silence. Set this according to your dataset.
// Griffin-Lim
"power": 1.5, // value to sharpen wav signals after GL algorithm.
"griffin_lim_iters": 60,// #griffin-lim iterations. 30-60 is a good range. Larger the value, slower the generation.
// MelSpectrogram parameters
"num_mels": 80, // size of the mel spec frame.
"mel_fmin": 0.0, // minimum freq level for mel-spec. ~50 for male and ~95 for female voices. Tune for dataset!!
"mel_fmax": 8000.0, // maximum freq level for mel-spec. Tune for dataset!!
"spec_gain": 20.0,
// Normalization parameters
"signal_norm": true, // normalize spec values. Mean-Var normalization if 'stats_path' is defined otherwise range normalization defined by the other params.
"min_level_db": -100, // lower bound for normalization
"symmetric_norm": true, // move normalization to range [-1, 1]
"max_norm": 4.0, // scale normalization to range [-max_norm, max_norm] or [0, max_norm]
"clip_norm": true, // clip normalized values into the range.
"stats_path": null // DO NOT USE WITH MULTI_SPEAKER MODEL. scaler stats file computed by 'compute_statistics.py'. If it is defined, mean-std based notmalization is used and other normalization params are ignored
},
// VOCABULARY PARAMETERS
// if custom character set is not defined,
// default set in symbols.py is used
// "characters":{
// "pad": "_",
// "eos": "~",
// "bos": "^",
// "characters": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!'(),-.:;? ",
// "punctuations":"!'(),-.:;? ",
// "phonemes":"iyɨʉɯuɪʏʊeøɘəɵɤoɛœɜɞʌɔæɐaɶɑɒᵻʘɓǀɗǃʄǂɠǁʛpbtdʈɖcɟkɡqɢʔɴŋɲɳnɱmʙrʀⱱɾɽɸβfvθðszʃʒʂʐçʝxɣχʁħʕhɦɬɮʋɹɻjɰlɭʎʟˈˌːˑʍwɥʜʢʡɕʑɺɧɚ˞ɫ"
// },
// DISTRIBUTED TRAINING
"distributed":{
"backend": "nccl",
"url": "tcp:\/\/localhost:54321"
},
"reinit_layers": [], // give a list of layer names to restore from the given checkpoint. If not defined, it reloads all heuristically matching layers.
// TRAINING
"batch_size": 1, // Batch size for training. Lower values than 32 might cause hard to learn attention. It is overwritten by 'gradual_training'.
"eval_batch_size":1,
"r": 7, // Number of decoder frames to predict per iteration. Set the initial values if gradual training is enabled.
    "gradual_training": [[0, 7, 4]], //set gradual training steps [first_step, r, batch_size]. If it is null, gradual training is disabled. For Tacotron, you might need to reduce the 'batch_size' as you proceed.
"loss_masking": true, // enable / disable loss masking against the sequence padding.
"ga_alpha": 10.0, // weight for guided attention loss. If > 0, guided attention is enabled.
"mixed_precision": false,
// VALIDATION
"run_eval": true,
"test_delay_epochs": 0, //Until attention is aligned, testing only wastes computation time.
"test_sentences_file": null, // set a file to load sentences to be used for testing. If it is null then we use default english sentences.
// LOSS SETTINGS
"loss_masking": true, // enable / disable loss masking against the sequence padding.
"decoder_loss_alpha": 0.5, // original decoder loss weight. If > 0, it is enabled
"postnet_loss_alpha": 0.25, // original postnet loss weight. If > 0, it is enabled
"postnet_diff_spec_alpha": 0.25, // differential spectral loss weight. If > 0, it is enabled
"decoder_diff_spec_alpha": 0.25, // differential spectral loss weight. If > 0, it is enabled
"decoder_ssim_alpha": 0.5, // decoder ssim loss weight. If > 0, it is enabled
"postnet_ssim_alpha": 0.25, // postnet ssim loss weight. If > 0, it is enabled
    "ga_alpha": 5.0,       // weight for guided attention loss. If > 0, guided attention is enabled. NOTE: duplicates the "ga_alpha" key defined earlier; most JSON parsers keep this later value.
"stopnet_pos_weight": 15.0, // pos class weight for stopnet loss since there are way more negative samples than positive samples.
// OPTIMIZER
"noam_schedule": false, // use noam warmup and lr schedule.
"grad_clip": 1.0, // upper limit for gradients for clipping.
"epochs": 1, // total number of epochs to train.
"lr": 0.0001, // Initial learning rate. If Noam decay is active, maximum learning rate.
"wd": 0.000001, // Weight decay weight.
"warmup_steps": 4000, // Noam decay steps to increase the learning rate from 0 to "lr"
    "seq_len_norm": false,      // Normalize each sample loss with its length to alleviate imbalanced datasets. Use it if your dataset is small or has skewed distribution of sequence lengths.
// TACOTRON PRENET
    "memory_size": -1,           // ONLY TACOTRON - size of the memory queue used for storing last decoder predictions for auto-regression. If < 0, memory queue is disabled and decoder only uses the last prediction frame.
"prenet_type": "bn", // "original" or "bn".
"prenet_dropout": false, // enable/disable dropout at prenet.
// TACOTRON ATTENTION
"attention_type": "original", // 'original' , 'graves', 'dynamic_convolution'
"attention_heads": 4, // number of attention heads (only for 'graves')
"attention_norm": "sigmoid", // softmax or sigmoid.
"windowing": false, // Enables attention windowing. Used only in eval mode.
"use_forward_attn": false, // if it uses forward attention. In general, it aligns faster.
"forward_attn_mask": false, // Additional masking forcing monotonicity only in eval mode.
"transition_agent": false, // enable/disable transition agent of forward attention.
"location_attn": true, // enable_disable location sensitive attention. It is enabled for TACOTRON by default.
"bidirectional_decoder": true, // use https://arxiv.org/abs/1907.09006. Use it, if attention does not work well with your dataset.
"double_decoder_consistency": false, // use DDC explained here https://erogol.com/solving-attention-problems-of-tts-models-with-double-decoder-consistency-draft/
"ddc_r": 7, // reduction rate for coarse decoder.
// STOPNET
"stopnet": true, // Train stopnet predicting the end of synthesis.
    "separate_stopnet": true,  // Train stopnet separately if 'stopnet==true'. It prevents stopnet loss to influence the rest of the model. It causes a better model, but it trains SLOWER.
// TENSORBOARD and LOGGING
"print_step": 1, // Number of steps to log training on console.
"tb_plot_step": 100, // Number of steps to plot TB training figures.
    "print_eval": false,     // If True, it prints intermediate loss values in evaluation.
    "save_step": 10000,      // Number of training steps expected to save training stats and checkpoints.
"checkpoint": true, // If true, it saves checkpoints per "save_step"
"keep_all_best": true, // If true, keeps all best_models after keep_after steps
"keep_after": 10000, // Global step after which to keep best models if keep_all_best is true
"tb_model_param_stats": false, // true, plots param stats per layer on tensorboard. Might be memory consuming, but good for debugging.
// DATA LOADING
"text_cleaner": "phoneme_cleaners",
"enable_eos_bos_chars": false, // enable/disable beginning of sentence and end of sentence chars.
"num_loader_workers": 0, // number of training data loader processes. Don't set it too big. 4-8 are good values.
"num_eval_loader_workers": 0, // number of evaluation data loader processes.
"batch_group_size": 0, //Number of batches to shuffle after bucketing.
"min_seq_len": 6, // DATASET-RELATED: minimum text length to use in training
"max_seq_len": 153, // DATASET-RELATED: maximum text length
"compute_input_seq_cache": true,
// PATHS
"output_path": "tests/train_outputs/",
// PHONEMES
"phoneme_cache_path": "tests/train_outputs/phoneme_cache/", // phoneme computation is slow, therefore, it caches results in the given folder.
    "use_phonemes": false,  // use phonemes instead of raw characters. It is suggested for better pronunciation.
"phoneme_language": "en-us", // depending on your target language, pick one from https://github.com/bootphon/phonemizer#languages
// MULTI-SPEAKER and GST
"use_d_vector_file": false,
"d_vector_file": null,
"use_speaker_embedding": false, // use speaker embedding to enable multi-speaker learning.
"use_gst": true, // use global style tokens
"gst": { // gst parameter if gst is enabled
"gst_style_input": null, // Condition the style input either on a
// -> wave file [path to wave] or
// -> dictionary using the style tokens {'token1': 'value', 'token2': 'value'} example {"0": 0.15, "1": 0.15, "5": -0.15}
// with the dictionary being len(dict) == len(gst_style_tokens).
"gst_use_speaker_embedding": true, // if true pass speaker embedding in attention input GST.
"gst_embedding_dim": 512,
"gst_num_heads": 4,
"gst_style_tokens": 10
},
// DATASETS
"train_portion": 0.1, // dataset portion used for training. It is mainly for internal experiments.
    "eval_portion": 0.1, // dataset portion used for evaluation. It is mainly for internal experiments.
"datasets": // List of datasets. They all merged and they get different speaker_ids.
[
{
"formatter": "ljspeech",
"path": "tests/data/ljspeech/",
"meta_file_train": "metadata.csv",
"meta_file_val": "metadata.csv"
}
]
}
| 0 |
coqui_public_repos/STT-examples/python_websocket_server/helm/stt_server | coqui_public_repos/STT-examples/python_websocket_server/helm/stt_server/overrides/values.dev.yaml | # Default values for stt-server.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: "<docker_repo_path>"
pullPolicy: Always
# Overrides the image tag whose default is the chart appVersion.
tag: "latest"
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
podAnnotations: {}
podSecurityContext: {}
# fsGroup: 2000
securityContext:
readOnlyRootFilesystem: false
runAsNonRoot: true
runAsUser: 1000
service:
type: ClusterIP
port: 8080
ingress:
enabled: true
annotations:
nginx.ingress.kubernetes.io/enable-cors: "true"
nginx.ingress.kubernetes.io/cors-allow-methods: "PUT, GET, POST, OPTIONS"
nginx.ingress.kubernetes.io/cors-allow-origin: "*"
nginx.ingress.kubernetes.io/cors-allow-credentials: "true"
nginx.ingress.kubernetes.io/rewrite-target: /
nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
nginx.ingress.kubernetes.io/proxy-send-timeout: "3600"
nginx.ingress.kubernetes.io/proxy-body-size: 20m
hosts:
- host: "<host>"
paths: ["/stt-server"]
tls:
- secretName: "<secret_name>"
hosts:
- "<host>"
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}
| 0 |
coqui_public_repos/STT | coqui_public_repos/STT/taskcluster/test-electronjs_v8.0_multiarchpkg-win-amd64-opt.yml | build:
template_file: test-win-opt-base.tyml
dependencies:
- "node-package-cpu"
- "test-training_16k-linux-amd64-py36m-opt"
test_model_task: "test-training_16k-linux-amd64-py36m-opt"
system_setup:
>
${system.sox_win} && ${nodejs.win.prep_12}
args:
tests_cmdline: "${system.homedir.win}/DeepSpeech/ds/taskcluster/tc-electron-tests.sh 12.x 8.0.1 16k"
metadata:
name: "DeepSpeech Windows AMD64 CPU ElectronJS MultiArch Package v8.0 tests"
description: "Testing DeepSpeech for Windows/AMD64 on ElectronJS MultiArch Package v8.0, CPU only, optimized version"
| 0 |
coqui_public_repos/TTS/TTS | coqui_public_repos/TTS/TTS/vocoder/README.md | # Mozilla TTS Vocoders (Experimental)
Here there are vocoder model implementations which can be combined with the other TTS models.
Currently, following models are implemented:
- Melgan
- MultiBand-Melgan
- ParallelWaveGAN
- GAN-TTS (Discriminator Only)
It is also very easy to adapt different vocoder models as we provide a flexible and modular (but not too modular) framework.
## Training a model
You can see here an example (Soon)[Colab Notebook]() training MelGAN with LJSpeech dataset.
In order to train a new model, you need to gather all wav files into a folder and give this folder to `data_path` in '''config.json'''
You need to define other relevant parameters in your ```config.json``` and then start training with the following command.
```CUDA_VISIBLE_DEVICES='0' python tts/bin/train_vocoder.py --config_path path/to/config.json```
Example config files can be found under `tts/vocoder/configs/` folder.
You can continue a previous training run by the following command.
```CUDA_VISIBLE_DEVICES='0' python tts/bin/train_vocoder.py --continue_path path/to/your/model/folder```
You can fine-tune a pre-trained model by the following command.
```CUDA_VISIBLE_DEVICES='0' python tts/bin/train_vocoder.py --restore_path path/to/your/model.pth```
Restoring a model starts a new training in a different folder. It only restores model weights with the given checkpoint file. However, continuing a training starts from the same directory where the previous training run left off.
You can also follow your training runs on Tensorboard as you do with our TTS models.
## Acknowledgement
Thanks to @kan-bayashi for his [repository](https://github.com/kan-bayashi/ParallelWaveGAN) being the start point of our work.
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/cache.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// An FST implementation that caches FST elements of a delayed computation.
#ifndef FST_CACHE_H_
#define FST_CACHE_H_
#include <functional>
#include <unordered_map>
using std::unordered_map;
using std::unordered_multimap;
#include <list>
#include <vector>
#include <fst/flags.h>
#include <fst/log.h>
#include <fst/vector-fst.h>
DECLARE_bool(fst_default_cache_gc);
DECLARE_int64_t(fst_default_cache_gc_limit);
namespace fst {
// Options for controlling caching behavior; higher level than CacheImplOptions.
// Defaults are taken from the --fst_default_cache_gc and
// --fst_default_cache_gc_limit command-line flags.
struct CacheOptions {
  bool gc;          // Enables GC.
  size_t gc_limit;  // Number of bytes allowed before GC.

  explicit CacheOptions(bool gc = FLAGS_fst_default_cache_gc,
                        size_t gc_limit = FLAGS_fst_default_cache_gc_limit)
      : gc(gc), gc_limit(gc_limit) {}
};
// Options for controlling caching behavior, at a lower level than
// CacheOptions; templated on the cache store and allows passing the store.
// Note that own_store defaults to true in both constructors, so a CacheImpl
// constructed from these options takes ownership of `store` unless the
// caller explicitly clears the flag.
template <class CacheStore>
struct CacheImplOptions {
  bool gc;            // Enables GC.
  size_t gc_limit;    // Number of bytes allowed before GC.
  CacheStore *store;  // Cache store; may be null, in which case the impl
                      // creates its own.
  bool own_store;     // Should CacheImpl takes ownership of the store?

  explicit CacheImplOptions(bool gc = FLAGS_fst_default_cache_gc,
                            size_t gc_limit = FLAGS_fst_default_cache_gc_limit,
                            CacheStore *store = nullptr)
      : gc(gc), gc_limit(gc_limit), store(store), own_store(true) {}

  // Conversion from the higher-level CacheOptions; no store is supplied.
  explicit CacheImplOptions(const CacheOptions &opts)
      : gc(opts.gc), gc_limit(opts.gc_limit), store(nullptr), own_store(true) {}
};
// Cache flags: per-state status bits stored in CacheState::flags_ and
// read/written by the cache stores and CacheImpl.
constexpr uint32_t kCacheFinal = 0x0001;   // Final weight has been cached.
constexpr uint32_t kCacheArcs = 0x0002;    // Arcs have been cached.
constexpr uint32_t kCacheInit = 0x0004;    // Initialized by GC.
constexpr uint32_t kCacheRecent = 0x0008;  // Visited since GC.

// Mask covering all of the flag bits above.
constexpr uint32_t kCacheFlags =
    kCacheFinal | kCacheArcs | kCacheInit | kCacheRecent;
// Cache state, with arcs stored in a per-state std::vector.
//
// States are allocated via the placement operator new / static Destroy()
// pair below using a StateAllocator. Each state carries two pieces of
// caller-managed book-keeping, both mutable so that they can be updated
// through const pointers held by caches:
//
//   - flags_: kCache* status bits (see above);
//   - ref_count_: manual reference count; a count of 0 marks the state as
//     available for garbage collection.
template <class A, class M = PoolAllocator<A>>
class CacheState {
 public:
  using Arc = A;
  using Label = typename Arc::Label;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;

  using ArcAllocator = M;
  using StateAllocator =
      typename ArcAllocator::template rebind<CacheState<A, M>>::other;

  // Provides STL allocator for arcs.
  explicit CacheState(const ArcAllocator &alloc)
      : final_(Weight::Zero()),
        niepsilons_(0),
        noepsilons_(0),
        arcs_(alloc),
        flags_(0),
        ref_count_(0) {}

  // Copy constructor: copies weight, arcs, epsilon counts and flags, but the
  // new state always starts with a zero reference count.
  CacheState(const CacheState<A> &state, const ArcAllocator &alloc)
      : final_(state.Final()),
        niepsilons_(state.NumInputEpsilons()),
        noepsilons_(state.NumOutputEpsilons()),
        arcs_(state.arcs_.begin(), state.arcs_.end(), alloc),
        flags_(state.Flags()),
        ref_count_(0) {}

  // Restores the state to its freshly-constructed condition: zero final
  // weight, no arcs, and cleared flags/reference count.
  void Reset() {
    final_ = Weight::Zero();
    niepsilons_ = 0;
    noepsilons_ = 0;
    ref_count_ = 0;
    flags_ = 0;
    arcs_.clear();
  }

  Weight Final() const { return final_; }

  size_t NumInputEpsilons() const { return niepsilons_; }

  size_t NumOutputEpsilons() const { return noepsilons_; }

  size_t NumArcs() const { return arcs_.size(); }

  const Arc &GetArc(size_t n) const { return arcs_[n]; }

  // Used by the ArcIterator<Fst<Arc>> efficient implementation.
  // Returns nullptr when there are no arcs.
  const Arc *Arcs() const { return !arcs_.empty() ? &arcs_[0] : nullptr; }

  // Accesses flags; used by the caller.
  uint32_t Flags() const { return flags_; }

  // Accesses ref count; used by the caller.
  int RefCount() const { return ref_count_; }

  void SetFinal(Weight weight) { final_ = std::move(weight); }

  void ReserveArcs(size_t n) { arcs_.reserve(n); }

  // Adds one arc at a time with all needed book-keeping; use PushArc and
  // SetArcs for a more efficient alternative.
  void AddArc(const Arc &arc) {
    arcs_.push_back(arc);
    if (arc.ilabel == 0) ++niepsilons_;
    if (arc.olabel == 0) ++noepsilons_;
  }

  // Adds one arc at a time with delayed book-keeping; finalize with SetArcs().
  // The epsilon counts are not updated until SetArcs() is called.
  void PushArc(const Arc &arc) { arcs_.push_back(arc); }

  // Finalizes arcs book-keeping; call only once (epsilon counts would be
  // double-counted otherwise).
  void SetArcs() {
    for (const auto &arc : arcs_) {
      if (arc.ilabel == 0) ++niepsilons_;
      if (arc.olabel == 0) ++noepsilons_;
    }
  }

  // Modifies nth arc, keeping the epsilon counts consistent.
  void SetArc(const Arc &arc, size_t n) {
    if (arcs_[n].ilabel == 0) --niepsilons_;
    if (arcs_[n].olabel == 0) --noepsilons_;
    if (arc.ilabel == 0) ++niepsilons_;
    if (arc.olabel == 0) ++noepsilons_;
    arcs_[n] = arc;
  }

  // Deletes all arcs.
  void DeleteArcs() {
    niepsilons_ = 0;
    noepsilons_ = 0;
    arcs_.clear();
  }

  // Deletes the last n arcs, updating the epsilon counts as it goes.
  void DeleteArcs(size_t n) {
    for (size_t i = 0; i < n; ++i) {
      if (arcs_.back().ilabel == 0) --niepsilons_;
      if (arcs_.back().olabel == 0) --noepsilons_;
      arcs_.pop_back();
    }
  }

  // Sets status flags; used by the caller. Bits in mask are cleared first,
  // then bits in flags are set.
  void SetFlags(uint32_t flags, uint32_t mask) const {
    flags_ &= ~mask;
    flags_ |= flags;
  }

  // Mutates reference counts; used by the caller.
  int IncrRefCount() const { return ++ref_count_; }

  int DecrRefCount() const { return --ref_count_; }

  // Used by the ArcIterator<Fst<Arc>> efficient implementation.
  int *MutableRefCount() const { return &ref_count_; }

  // Used for state class allocation: placement construction out of the
  // caller-supplied state allocator.
  void *operator new(size_t size, StateAllocator *alloc) {
    return alloc->allocate(1);
  }

  // For state destruction and memory freeing; the counterpart of the
  // placement operator new above. Safe to call with a null state.
  static void Destroy(CacheState<Arc> *state, StateAllocator *alloc) {
    if (state) {
      state->~CacheState<Arc>();
      alloc->deallocate(state, 1);
    }
  }

 private:
  Weight final_;                         // Final weight.
  size_t niepsilons_;                    // # of input epsilons.
  size_t noepsilons_;                    // # of output epsilons.
  std::vector<Arc, ArcAllocator> arcs_;  // Arcs representation.
  mutable uint32_t flags_;               // kCache* status bits.
  mutable int ref_count_;                // If 0, available for GC.
};
// Cache store, allocating and storing states, providing a mapping from state
// IDs to cached states, and an iterator over these states. The state template
// argument must implement the CacheState interface. The state for a StateId s
// is constructed when requested by GetMutableState(s) if it is not yet stored.
// Initially, a state has a reference count of zero, but the user may increment
// or decrement this to control the time of destruction. In particular, a state
// is destroyed when:
//
// 1. This instance is destroyed, or
// 2. Clear() or Delete() is called, or
// 3. Possibly (implementation-dependently) when:
// - Garbage collection is enabled (as defined by opts.gc),
// - The cache store size exceeds the limits (as defined by opts.gc_limits),
// - The state's reference count is zero, and
// - The state is not the most recently requested state.
//
// template <class S>
// class CacheStore {
// public:
// using State = S;
// using Arc = typename State::Arc;
// using StateId = typename Arc::StateId;
//
// // Required constructors/assignment operators.
// explicit CacheStore(const CacheOptions &opts);
//
// // Returns nullptr if state is not stored.
// const State *GetState(StateId s);
//
// // Creates state if state is not stored.
// State *GetMutableState(StateId s);
//
// // Similar to State::AddArc() but updates cache store book-keeping.
// void AddArc(State *state, const Arc &arc);
//
// // Similar to State::SetArcs() but updates cache store book-keeping; call
// // only once.
// void SetArcs(State *state);
//
// // Similar to State::DeleteArcs() but updates cache store book-keeping.
//
// void DeleteArcs(State *state);
//
// void DeleteArcs(State *state, size_t n);
//
// // Deletes all cached states.
// void Clear();
//
// // Iterates over cached states (in an arbitrary order); only needed if
// // opts.gc is true.
// bool Done() const; // End of iteration.
// StateId Value() const; // Current state.
// void Next(); // Advances to next state (when !Done).
// void Reset(); // Returns to initial condition.
// void Delete(); // Deletes current state and advances to next.
// };
// Container cache stores.

// This class uses a vector of pointers to states to store cached states,
// indexed directly by StateId. When GC is enabled (opts.gc), it additionally
// keeps a list of cached state IDs to support iteration/deletion.
template <class S>
class VectorCacheStore {
 public:
  using State = S;
  using Arc = typename State::Arc;
  using StateId = typename Arc::StateId;
  using StateList = std::list<StateId, PoolAllocator<StateId>>;

  // Required constructors/assignment operators.
  explicit VectorCacheStore(const CacheOptions &opts) : cache_gc_(opts.gc) {
    Clear();
    Reset();
  }

  VectorCacheStore(const VectorCacheStore<S> &store)
      : cache_gc_(store.cache_gc_) {
    CopyStates(store);
    Reset();
  }

  ~VectorCacheStore() { Clear(); }

  VectorCacheStore<State> &operator=(const VectorCacheStore<State> &store) {
    if (this != &store) {
      CopyStates(store);
      Reset();
    }
    return *this;
  }

  // Returns nullptr if state is not stored.
  const State *GetState(StateId s) const {
    return s < state_vec_.size() ? state_vec_[s] : nullptr;
  }

  // Creates state if state is not stored; the vector is grown (with null
  // slots) as needed to cover s.
  State *GetMutableState(StateId s) {
    State *state = nullptr;
    if (s >= state_vec_.size()) {
      state_vec_.resize(s + 1, nullptr);
    } else {
      state = state_vec_[s];
    }
    if (!state) {
      state = new (&state_alloc_) State(arc_alloc_);
      state_vec_[s] = state;
      if (cache_gc_) state_list_.push_back(s);
    }
    return state;
  }

  // Similar to State::AddArc() but updates cache store book-keeping
  void AddArc(State *state, const Arc &arc) { state->AddArc(arc); }

  // Similar to State::SetArcs() but updates cache store book-keeping; call
  // only once.
  void SetArcs(State *state) { state->SetArcs(); }

  // Deletes all arcs.
  void DeleteArcs(State *state) { state->DeleteArcs(); }

  // Deletes some arcs.
  void DeleteArcs(State *state, size_t n) { state->DeleteArcs(n); }

  // Deletes all cached states.
  void Clear() {
    for (StateId s = 0; s < state_vec_.size(); ++s) {
      State::Destroy(state_vec_[s], &state_alloc_);
    }
    state_vec_.clear();
    state_list_.clear();
  }

  // Iterates over cached states (in an arbitrary order); only works if GC is
  // enabled (o.w. avoiding state_list_ overhead).
  bool Done() const { return iter_ == state_list_.end(); }

  StateId Value() const { return *iter_; }

  void Next() { ++iter_; }

  void Reset() { iter_ = state_list_.begin(); }

  // Deletes current state and advances to next. Post-increment is used so
  // that iter_ is moved off the element before list::erase invalidates it.
  void Delete() {
    State::Destroy(state_vec_[*iter_], &state_alloc_);
    state_vec_[*iter_] = nullptr;
    state_list_.erase(iter_++);
  }

 private:
  // Replaces this store's contents with deep copies of store's states.
  void CopyStates(const VectorCacheStore<State> &store) {
    Clear();
    state_vec_.reserve(store.state_vec_.size());
    for (StateId s = 0; s < store.state_vec_.size(); ++s) {
      State *state = nullptr;
      const auto *store_state = store.state_vec_[s];
      if (store_state) {
        state = new (&state_alloc_) State(*store_state, arc_alloc_);
        if (cache_gc_) state_list_.push_back(s);
      }
      state_vec_.push_back(state);
    }
  }

  bool cache_gc_;                               // Supports iteration when true.
  std::vector<State *> state_vec_;              // Vector of states (or null).
  StateList state_list_;                        // List of states.
  typename StateList::iterator iter_;           // State list iterator.
  typename State::StateAllocator state_alloc_;  // For state allocation.
  typename State::ArcAllocator arc_alloc_;      // For arc allocation.
};
// This class uses a hash map from state IDs to pointers to cached states.
// Unlike VectorCacheStore, iteration over cached states is always supported
// (the CacheOptions argument is unused here).
template <class S>
class HashCacheStore {
 public:
  using State = S;
  using Arc = typename State::Arc;
  using StateId = typename Arc::StateId;

  using StateMap =
      std::unordered_map<StateId, State *, std::hash<StateId>,
                         std::equal_to<StateId>,
                         PoolAllocator<std::pair<const StateId, State *>>>;

  // Required constructors/assignment operators. Note that opts is ignored.
  explicit HashCacheStore(const CacheOptions &opts) {
    Clear();
    Reset();
  }

  HashCacheStore(const HashCacheStore<S> &store) {
    CopyStates(store);
    Reset();
  }

  ~HashCacheStore() { Clear(); }

  HashCacheStore<State> &operator=(const HashCacheStore<State> &store) {
    if (this != &store) {
      CopyStates(store);
      Reset();
    }
    return *this;
  }

  // Returns nullptr if state is not stored.
  const State *GetState(StateId s) const {
    const auto it = state_map_.find(s);
    return it != state_map_.end() ? it->second : nullptr;
  }

  // Creates state if state is not stored. operator[] default-inserts a null
  // pointer slot, which is then filled in on first access.
  State *GetMutableState(StateId s) {
    auto *&state = state_map_[s];
    if (!state) state = new (&state_alloc_) State(arc_alloc_);
    return state;
  }

  // Similar to State::AddArc() but updates cache store book-keeping.
  void AddArc(State *state, const Arc &arc) { state->AddArc(arc); }

  // Similar to State::SetArcs() but updates internal cache size; call only
  // once.
  void SetArcs(State *state) { state->SetArcs(); }

  // Deletes all arcs.
  void DeleteArcs(State *state) { state->DeleteArcs(); }

  // Deletes some arcs.
  void DeleteArcs(State *state, size_t n) { state->DeleteArcs(n); }

  // Deletes all cached states.
  void Clear() {
    for (auto it = state_map_.begin(); it != state_map_.end(); ++it) {
      State::Destroy(it->second, &state_alloc_);
    }
    state_map_.clear();
  }

  // Iterates over cached states (in an arbitrary order).
  bool Done() const { return iter_ == state_map_.end(); }

  StateId Value() const { return iter_->first; }

  void Next() { ++iter_; }

  void Reset() { iter_ = state_map_.begin(); }

  // Deletes current state and advances to next. The post-increment is safe
  // because unordered_map::erase only invalidates the erased iterator.
  void Delete() {
    State::Destroy(iter_->second, &state_alloc_);
    state_map_.erase(iter_++);
  }

 private:
  // Replaces this store's contents with deep copies of store's states.
  void CopyStates(const HashCacheStore<State> &store) {
    Clear();
    for (auto it = store.state_map_.begin(); it != store.state_map_.end();
         ++it) {
      state_map_[it->first] =
          new (&state_alloc_) State(*it->second, arc_alloc_);
    }
  }

  StateMap state_map_;                          // Map from state ID to state.
  typename StateMap::iterator iter_;            // State map iterator.
  typename State::StateAllocator state_alloc_;  // For state allocation.
  typename State::ArcAllocator arc_alloc_;      // For arc allocation.
};
// Garbage-collection cache stores.

// This class implements a simple garbage collection scheme when
// 'opts.gc_limit = 0'. In particular, the first cached state is reused for each
// new state so long as the reference count is zero on the to-be-reused state.
// Otherwise, the full underlying store is used. The caller can increment the
// reference count to inhibit the GC of in-use states (e.g., in an ArcIterator).
//
// The typical use case for this optimization is when a single pass over a
// cached
// FST is performed with only one-state expanded at a time.
//
// Implementation note: slot 0 of the underlying store is reserved for the
// reusable "first" state, so external state s maps to underlying slot s + 1.
template <class CacheStore>
class FirstCacheStore {
 public:
  using State = typename CacheStore::State;
  using Arc = typename State::Arc;
  using StateId = typename Arc::StateId;

  // Required constructors/assignment operators.
  explicit FirstCacheStore(const CacheOptions &opts)
      : store_(opts),
        cache_gc_(opts.gc_limit == 0),  // opts.gc ignored historically.
        cache_first_state_id_(kNoStateId),
        cache_first_state_(nullptr) {}

  // Copy constructor: the first cached state, if any, lives in slot 0 of the
  // copied underlying store.
  FirstCacheStore(const FirstCacheStore<CacheStore> &store)
      : store_(store.store_),
        cache_gc_(store.cache_gc_),
        cache_first_state_id_(store.cache_first_state_id_),
        cache_first_state_(store.cache_first_state_id_ != kNoStateId
                               ? store_.GetMutableState(0)
                               : nullptr) {}

  FirstCacheStore<CacheStore> &operator=(
      const FirstCacheStore<CacheStore> &store) {
    if (this != &store) {
      store_ = store.store_;
      cache_gc_ = store.cache_gc_;
      cache_first_state_id_ = store.cache_first_state_id_;
      cache_first_state_ = store.cache_first_state_id_ != kNoStateId
                               ? store_.GetMutableState(0)
                               : nullptr;
    }
    return *this;
  }

  // Returns nullptr if state is not stored.
  const State *GetState(StateId s) const {
    // store_ state 0 may hold first cached state; the rest are shifted by 1.
    return s == cache_first_state_id_ ? cache_first_state_
                                      : store_.GetState(s + 1);
  }

  // Creates state if state is not stored.
  State *GetMutableState(StateId s) {
    // store_ state 0 used to hold first cached state; the rest are shifted by
    // 1.
    if (cache_first_state_id_ == s) {
      return cache_first_state_;  // Request for first cached state.
    }
    if (cache_gc_) {
      if (cache_first_state_id_ == kNoStateId) {
        cache_first_state_id_ = s;  // Sets first cached state.
        cache_first_state_ = store_.GetMutableState(0);
        cache_first_state_->SetFlags(kCacheInit, kCacheInit);
        cache_first_state_->ReserveArcs(2 * kAllocSize);
        return cache_first_state_;
      } else if (cache_first_state_->RefCount() == 0) {
        cache_first_state_id_ = s;  // Updates first cached state.
        cache_first_state_->Reset();
        cache_first_state_->SetFlags(kCacheInit, kCacheInit);
        return cache_first_state_;
      } else {  // Keeps first cached state.
        cache_first_state_->SetFlags(0, kCacheInit);  // Clears initialized bit.
        cache_gc_ = false;                            // Disables GC.
      }
    }
    auto *state = store_.GetMutableState(s + 1);
    return state;
  }

  // Similar to State::AddArc() but updates cache store book-keeping.
  void AddArc(State *state, const Arc &arc) { store_.AddArc(state, arc); }

  // Similar to State::SetArcs() but updates internal cache size; call only
  // once.
  void SetArcs(State *state) { store_.SetArcs(state); }

  // Deletes all arcs
  void DeleteArcs(State *state) { store_.DeleteArcs(state); }

  // Deletes some arcs
  void DeleteArcs(State *state, size_t n) { store_.DeleteArcs(state, n); }

  // Deletes all cached states
  void Clear() {
    store_.Clear();
    cache_first_state_id_ = kNoStateId;
    cache_first_state_ = nullptr;
  }

  // Iterates over cached states (in an arbitrary order). Only needed if GC is
  // enabled.
  bool Done() const { return store_.Done(); }

  StateId Value() const {
    // store_ state 0 may hold first cached state; rest shifted + 1.
    const auto s = store_.Value();
    return s ? s - 1 : cache_first_state_id_;
  }

  void Next() { store_.Next(); }

  void Reset() { store_.Reset(); }

  // Deletes current state and advances to next.
  void Delete() {
    if (Value() == cache_first_state_id_) {
      cache_first_state_id_ = kNoStateId;
      cache_first_state_ = nullptr;
    }
    store_.Delete();
  }

 private:
  CacheStore store_;              // Underlying store.
  bool cache_gc_;                 // GC enabled.
  StateId cache_first_state_id_;  // First cached state ID.
  State *cache_first_state_;      // First cached state.
};
// This class implements mark-sweep garbage collection on an underlying cache
// store. If GC is enabled, garbage collection of states is performed in a
// rough approximation of LRU order once when 'gc_limit' bytes is reached. The
// caller can increment the reference count to inhibit the GC of in-use state
// (e.g., in an ArcIterator). With GC enabled, the 'gc_limit' parameter allows
// the caller to trade-off time vs. space.
template <class CacheStore>
class GCCacheStore {
public:
using State = typename CacheStore::State;
using Arc = typename State::Arc;
using StateId = typename Arc::StateId;
// Required constructors/assignment operators.
explicit GCCacheStore(const CacheOptions &opts)
: store_(opts),
cache_gc_request_(opts.gc),
cache_limit_(opts.gc_limit > kMinCacheLimit ? opts.gc_limit
: kMinCacheLimit),
cache_gc_(false),
cache_size_(0) {}
// Returns 0 if state is not stored.
const State *GetState(StateId s) const { return store_.GetState(s); }
// Creates state if state is not stored
State *GetMutableState(StateId s) {
auto *state = store_.GetMutableState(s);
if (cache_gc_request_ && !(state->Flags() & kCacheInit)) {
state->SetFlags(kCacheInit, kCacheInit);
cache_size_ += sizeof(State) + state->NumArcs() * sizeof(Arc);
// GC is enabled once an uninited state (from underlying store) is seen.
cache_gc_ = true;
if (cache_size_ > cache_limit_) GC(state, false);
}
return state;
}
// Similar to State::AddArc() but updates cache store book-keeping.
void AddArc(State *state, const Arc &arc) {
store_.AddArc(state, arc);
if (cache_gc_ && (state->Flags() & kCacheInit)) {
cache_size_ += sizeof(Arc);
if (cache_size_ > cache_limit_) GC(state, false);
}
}
// Similar to State::SetArcs() but updates internal cache size; call only
// once.
void SetArcs(State *state) {
store_.SetArcs(state);
if (cache_gc_ && (state->Flags() & kCacheInit)) {
cache_size_ += state->NumArcs() * sizeof(Arc);
if (cache_size_ > cache_limit_) GC(state, false);
}
}
// Deletes all arcs.
void DeleteArcs(State *state) {
if (cache_gc_ && (state->Flags() & kCacheInit)) {
cache_size_ -= state->NumArcs() * sizeof(Arc);
}
store_.DeleteArcs(state);
}
// Deletes some arcs.
void DeleteArcs(State *state, size_t n) {
if (cache_gc_ && (state->Flags() & kCacheInit)) {
cache_size_ -= n * sizeof(Arc);
}
store_.DeleteArcs(state, n);
}
// Deletes all cached states.
void Clear() {
store_.Clear();
cache_size_ = 0;
}
// Iterates over cached states (in an arbitrary order); only needed if GC is
// enabled.
bool Done() const { return store_.Done(); }
StateId Value() const { return store_.Value(); }
void Next() { store_.Next(); }
void Reset() { store_.Reset(); }
// Deletes current state and advances to next.
void Delete() {
if (cache_gc_) {
const auto *state = store_.GetState(Value());
if (state->Flags() & kCacheInit) {
cache_size_ -= sizeof(State) + state->NumArcs() * sizeof(Arc);
}
}
store_.Delete();
}
  // Removes from the cache store (not referenced-counted and not the current)
  // states that have not been accessed since the last GC until at most
  // cache_fraction * cache_limit_ bytes are cached. If that fails to free
  // enough, attempts to uncache recently visited states as well. If still
  // unable to free enough memory, then widens cache_limit_.
  void GC(const State *current, bool free_recent, float cache_fraction = 0.666);
  // Returns the current cache size in bytes or 0 if GC is disabled.
  size_t CacheSize() const { return cache_size_; }
  // Returns the cache limit in bytes.
  size_t CacheLimit() const { return cache_limit_; }
 private:
  static constexpr size_t kMinCacheLimit = 8096;  // Minimum cache limit.
  CacheStore store_;        // Underlying store.
  bool cache_gc_request_;   // GC requested but possibly not yet enabled.
  size_t cache_limit_;      // Number of bytes allowed before GC.
  bool cache_gc_;           // GC enabled.
  size_t cache_size_;       // Number of bytes currently cached.
};
template <class CacheStore>
void GCCacheStore<CacheStore>::GC(const State *current, bool free_recent,
                                  float cache_fraction) {
  if (!cache_gc_) return;
  VLOG(2) << "GCCacheStore: Enter GC: object = "
          << "(" << this << "), free recently cached = " << free_recent
          << ", cache size = " << cache_size_
          << ", cache frac = " << cache_fraction
          << ", cache limit = " << cache_limit_ << "\n";
  size_t cache_target = cache_fraction * cache_limit_;
  store_.Reset();
  // Sweeps over all cached states, deleting those that are evictable:
  // unreferenced, not the in-use state and (unless free_recent is true) not
  // recently accessed, until usage drops to the target size.
  while (!store_.Done()) {
    auto *state = store_.GetMutableState(store_.Value());
    if (cache_size_ > cache_target && state->RefCount() == 0 &&
        (free_recent || !(state->Flags() & kCacheRecent)) && state != current) {
      if (state->Flags() & kCacheInit) {
        size_t size = sizeof(State) + state->NumArcs() * sizeof(Arc);
        if (size < cache_size_) {
          cache_size_ -= size;
        }
      }
      store_.Delete();
    } else {
      // Survivors lose their "recent" bit so they become evictable on the
      // next sweep unless touched again in the meantime.
      state->SetFlags(0, kCacheRecent);
      store_.Next();
    }
  }
  if (!free_recent && cache_size_ > cache_target) {  // Recurses on recent.
    GC(current, true, cache_fraction);
  } else if (cache_target > 0) {  // Widens cache limit.
    // Nothing more can be evicted; grow the limit until current usage fits
    // within the target fraction of it.
    while (cache_size_ > cache_target) {
      cache_limit_ *= 2;
      cache_target *= 2;
    }
  } else if (cache_size_ > 0) {
    FSTERROR() << "GCCacheStore:GC: Unable to free all cached states";
  }
  VLOG(2) << "GCCacheStore: Exit GC: object = "
          << "(" << this << "), free recently cached = " << free_recent
          << ", cache size = " << cache_size_
          << ", cache frac = " << cache_fraction
          << ", cache limit = " << cache_limit_ << "\n";
}
// Out-of-class definition required for ODR-use of the static constexpr
// member prior to C++17 (where static constexpr members are implicitly
// inline and this definition becomes redundant).
template <class CacheStore>
constexpr size_t GCCacheStore<CacheStore>::kMinCacheLimit;
// The default cache state and store used by CacheBaseImpl: VectorCacheStore
// for storage, decorated by FirstCacheStore for fast access to the most
// recent state and by GCCacheStore for (optional) garbage collection.
template <class Arc>
class DefaultCacheStore
    : public GCCacheStore<FirstCacheStore<VectorCacheStore<CacheState<Arc>>>> {
 public:
  using Base = GCCacheStore<FirstCacheStore<VectorCacheStore<CacheState<Arc>>>>;

  explicit DefaultCacheStore(const CacheOptions &opts) : Base(opts) {}
};
namespace internal {
// This class is used to cache FST elements stored in states of type State
// (see CacheState) with the flags used to indicate what has been cached. Use
// HasStart(), HasFinal(), and HasArcs() to determine if cached and SetStart(),
// SetFinal(), AddArc(), (or PushArc() and SetArcs()) to cache. Note that you
// must set the final weight even if the state is non-final to mark it as
// cached. The state storage method and any garbage collection policy are
// determined by the cache store. If the store is passed in with the options,
// CacheBaseImpl takes ownership.
template <class State,
          class CacheStore = DefaultCacheStore<typename State::Arc>>
class CacheBaseImpl : public FstImpl<typename State::Arc> {
 public:
  using Arc = typename State::Arc;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using Store = CacheStore;
  using FstImpl<Arc>::Type;
  using FstImpl<Arc>::Properties;
  // Constructs with a freshly created cache store owned by this impl.
  explicit CacheBaseImpl(const CacheOptions &opts = CacheOptions())
      : has_start_(false),
        cache_start_(kNoStateId),
        nknown_states_(0),
        min_unexpanded_state_id_(0),
        max_expanded_state_id_(-1),
        cache_gc_(opts.gc),
        cache_limit_(opts.gc_limit),
        cache_store_(new CacheStore(opts)),
        new_cache_store_(true),
        own_cache_store_(true) {}
  // Constructs around a user-supplied store when opts.store is non-null;
  // ownership of that store then follows opts.own_store.
  explicit CacheBaseImpl(const CacheImplOptions<CacheStore> &opts)
      : has_start_(false),
        cache_start_(kNoStateId),
        nknown_states_(0),
        min_unexpanded_state_id_(0),
        max_expanded_state_id_(-1),
        cache_gc_(opts.gc),
        cache_limit_(opts.gc_limit),
        cache_store_(opts.store ? opts.store : new CacheStore(CacheOptions(
                                                   opts.gc, opts.gc_limit))),
        new_cache_store_(!opts.store),
        own_cache_store_(opts.store ? opts.own_store : true) {}
  // Preserve gc parameters. If preserve_cache is true, also preserves
  // cache data (copies the source's store and expansion book-keeping).
  CacheBaseImpl(const CacheBaseImpl<State, CacheStore> &impl,
                bool preserve_cache = false)
      : FstImpl<Arc>(),
        has_start_(false),
        cache_start_(kNoStateId),
        nknown_states_(0),
        min_unexpanded_state_id_(0),
        max_expanded_state_id_(-1),
        cache_gc_(impl.cache_gc_),
        cache_limit_(impl.cache_limit_),
        cache_store_(new CacheStore(CacheOptions(cache_gc_, cache_limit_))),
        new_cache_store_(impl.new_cache_store_ || !preserve_cache),
        own_cache_store_(true) {
    if (preserve_cache) {
      *cache_store_ = *impl.cache_store_;
      has_start_ = impl.has_start_;
      cache_start_ = impl.cache_start_;
      nknown_states_ = impl.nknown_states_;
      expanded_states_ = impl.expanded_states_;
      min_unexpanded_state_id_ = impl.min_unexpanded_state_id_;
      max_expanded_state_id_ = impl.max_expanded_state_id_;
    }
  }
  ~CacheBaseImpl() override { if (own_cache_store_) delete cache_store_; }
  // Caches the start state; also extends the known-state count.
  void SetStart(StateId s) {
    cache_start_ = s;
    has_start_ = true;
    if (s >= nknown_states_) nknown_states_ = s + 1;
  }
  // Caches the final weight of a state. Must be called even for non-final
  // states (with Weight::Zero()) to mark the final weight as cached.
  void SetFinal(StateId s, Weight weight) {
    auto *state = cache_store_->GetMutableState(s);
    state->SetFinal(std::move(weight));
    static constexpr auto flags = kCacheFinal | kCacheRecent;
    state->SetFlags(flags, flags);
  }
// Disabled to ensure PushArc not AddArc is used in existing code
// TODO(sorenj): re-enable for backing store
#if 0
  // AddArc adds a single arc to a state and does incremental cache
  // book-keeping. For efficiency, prefer PushArc and SetArcs below
  // when possible.
  void AddArc(StateId s, const Arc &arc) {
    auto *state = cache_store_->GetMutableState(s);
    cache_store_->AddArc(state, arc);
    if (arc.nextstate >= nknown_states_)
      nknown_states_ = arc.nextstate + 1;
    SetExpandedState(s);
    static constexpr auto flags = kCacheArcs | kCacheRecent;
    state->SetFlags(flags, flags);
  }
#endif
  // Adds a single arc to a state but delays cache book-keeping. SetArcs must
  // be called when all PushArc calls at a state are complete. Do not mix with
  // calls to AddArc.
  void PushArc(StateId s, const Arc &arc) {
    auto *state = cache_store_->GetMutableState(s);
    state->PushArc(arc);
  }
  // Marks arcs of a state as cached and does cache book-keeping after all
  // calls to PushArc have been completed. Do not mix with calls to AddArc.
  void SetArcs(StateId s) {
    auto *state = cache_store_->GetMutableState(s);
    cache_store_->SetArcs(state);
    const auto narcs = state->NumArcs();
    // Every arc target is now a known state.
    for (size_t a = 0; a < narcs; ++a) {
      const auto &arc = state->GetArc(a);
      if (arc.nextstate >= nknown_states_) nknown_states_ = arc.nextstate + 1;
    }
    SetExpandedState(s);
    static constexpr auto flags = kCacheArcs | kCacheRecent;
    state->SetFlags(flags, flags);
  }
  // Reserves space for n arcs at the given state.
  void ReserveArcs(StateId s, size_t n) {
    auto *state = cache_store_->GetMutableState(s);
    state->ReserveArcs(n);
  }
  // Deletes all arcs at the given state.
  void DeleteArcs(StateId s) {
    auto *state = cache_store_->GetMutableState(s);
    cache_store_->DeleteArcs(state);
  }
  // Deletes the n most recently added arcs at the given state.
  void DeleteArcs(StateId s, size_t n) {
    auto *state = cache_store_->GetMutableState(s);
    cache_store_->DeleteArcs(state, n);
  }
  // Resets the cache to its empty initial condition.
  void Clear() {
    nknown_states_ = 0;
    min_unexpanded_state_id_ = 0;
    max_expanded_state_id_ = -1;
    has_start_ = false;
    cache_start_ = kNoStateId;
    cache_store_->Clear();
  }
  // Is the start state cached?
  bool HasStart() const {
    // An errored FST reports the start as cached so callers terminate.
    if (!has_start_ && Properties(kError)) has_start_ = true;
    return has_start_;
  }
  // Is the final weight of the state cached? A cache hit also refreshes the
  // state's "recently used" bit for GC purposes.
  bool HasFinal(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    if (state && state->Flags() & kCacheFinal) {
      state->SetFlags(kCacheRecent, kCacheRecent);
      return true;
    } else {
      return false;
    }
  }
  // Are arcs of the state cached? A cache hit also refreshes the state's
  // "recently used" bit for GC purposes.
  bool HasArcs(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    if (state && state->Flags() & kCacheArcs) {
      state->SetFlags(kCacheRecent, kCacheRecent);
      return true;
    } else {
      return false;
    }
  }
  StateId Start() const { return cache_start_; }
  Weight Final(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    return state->Final();
  }
  size_t NumArcs(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    return state->NumArcs();
  }
  size_t NumInputEpsilons(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    return state->NumInputEpsilons();
  }
  size_t NumOutputEpsilons(StateId s) const {
    const auto *state = cache_store_->GetState(s);
    return state->NumOutputEpsilons();
  }
  // Provides information needed for generic arc iterator; the returned data
  // holds a reference count on the state, released by the iterator.
  void InitArcIterator(StateId s, ArcIteratorData<Arc> *data) const {
    const auto *state = cache_store_->GetState(s);
    data->base = nullptr;
    data->narcs = state->NumArcs();
    data->arcs = state->Arcs();
    data->ref_count = state->MutableRefCount();
    state->IncrRefCount();
  }
  // Number of known states.
  StateId NumKnownStates() const { return nknown_states_; }
  // Updates number of known states, taking into account the passed state ID.
  void UpdateNumKnownStates(StateId s) {
    if (s >= nknown_states_) nknown_states_ = s + 1;
  }
  // Finds the mininum never-expanded state ID, advancing the cursor past
  // states already expanded.
  StateId MinUnexpandedState() const {
    while (min_unexpanded_state_id_ <= max_expanded_state_id_ &&
           ExpandedState(min_unexpanded_state_id_)) {
      ++min_unexpanded_state_id_;
    }
    return min_unexpanded_state_id_;
  }
  // Returns maximum ever-expanded state ID.
  StateId MaxExpandedState() const { return max_expanded_state_id_; }
  // Marks a state as expanded, maintaining the min/max expansion cursors and
  // (when GC may evict states) the explicit expanded-state bit vector.
  void SetExpandedState(StateId s) {
    if (s > max_expanded_state_id_) max_expanded_state_id_ = s;
    if (s < min_unexpanded_state_id_) return;
    if (s == min_unexpanded_state_id_) ++min_unexpanded_state_id_;
    if (cache_gc_ || cache_limit_ == 0) {
      if (expanded_states_.size() <= s) expanded_states_.resize(s + 1, false);
      expanded_states_[s] = true;
    }
  }
  bool ExpandedState(StateId s) const {
    if (cache_gc_ || cache_limit_ == 0) {
      return expanded_states_[s];
    } else if (new_cache_store_) {
      // Without GC, a state is expanded iff it is present in the store.
      return cache_store_->GetState(s) != nullptr;
    } else {
      // If the cache was not created by this class, then the cached state needs
      // to be inspected to update nknown_states_.
      return false;
    }
  }
  const CacheStore *GetCacheStore() const { return cache_store_; }
  CacheStore *GetCacheStore() { return cache_store_; }
  // Caching on/off switch, limit and size accessors.
  bool GetCacheGc() const { return cache_gc_; }
  size_t GetCacheLimit() const { return cache_limit_; }
 private:
  mutable bool has_start_;                  // Is the start state cached?
  StateId cache_start_;                     // ID of start state.
  StateId nknown_states_;                   // Number of known states.
  std::vector<bool> expanded_states_;       // States that have been expanded.
  mutable StateId min_unexpanded_state_id_; // Minimum never-expanded state ID
  mutable StateId max_expanded_state_id_;   // Maximum ever-expanded state ID
  bool cache_gc_;                           // GC enabled.
  size_t cache_limit_;                      // Number of bytes allowed before GC.
  CacheStore *cache_store_;                 // The store of cached states.
  bool new_cache_store_;                    // Was the store created by this class?
  bool own_cache_store_;                    // Is the store owned by this class?
  CacheBaseImpl &operator=(const CacheBaseImpl &impl) = delete;
};
// A CacheBaseImpl specialized to the default cache state type.
template <class Arc>
class CacheImpl : public CacheBaseImpl<CacheState<Arc>> {
 public:
  using State = CacheState<Arc>;
  using Base = CacheBaseImpl<State>;

  CacheImpl() = default;

  explicit CacheImpl(const CacheOptions &opts) : Base(opts) {}

  CacheImpl(const CacheImpl<Arc> &impl, bool preserve_cache = false)
      : Base(impl, preserve_cache) {}

 private:
  CacheImpl &operator=(const CacheImpl &impl) = delete;
};
} // namespace internal
// Use this to make a state iterator for a CacheBaseImpl-derived FST, which must
// have Arc and Store types defined. Note this iterator only returns those
// states reachable from the initial state, so consider implementing a
// class-specific one.
//
// This class may be derived from.
template <class FST>
class CacheStateIterator : public StateIteratorBase<typename FST::Arc> {
 public:
  using Arc = typename FST::Arc;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using Store = typename FST::Store;
  using State = typename Store::State;
  using Impl = internal::CacheBaseImpl<State, Store>;
  CacheStateIterator(const FST &fst, Impl *impl)
      : fst_(fst), impl_(impl), s_(0) {
    fst_.Start();  // Forces start state.
  }
  // A state only becomes "known" once some cached arc points to it, so
  // Done() lazily expands unexpanded states until either s_ becomes known
  // or no unexpanded states remain.
  bool Done() const final {
    if (s_ < impl_->NumKnownStates()) return false;
    for (StateId u = impl_->MinUnexpandedState(); u < impl_->NumKnownStates();
         u = impl_->MinUnexpandedState()) {
      // Forces state expansion.
      ArcIterator<FST> aiter(fst_, u);
      aiter.SetFlags(kArcValueFlags, kArcValueFlags | kArcNoCache);
      for (; !aiter.Done(); aiter.Next()) {
        impl_->UpdateNumKnownStates(aiter.Value().nextstate);
      }
      impl_->SetExpandedState(u);
      if (s_ < impl_->NumKnownStates()) return false;
    }
    return true;
  }
  StateId Value() const final { return s_; }
  void Next() final { ++s_; }
  void Reset() final { s_ = 0; }
 private:
  const FST &fst_;  // FST being iterated over (borrowed reference).
  Impl *impl_;      // Cache implementation (borrowed pointer).
  StateId s_;       // Current state ID.
};
// Used to make an arc iterator for a CacheBaseImpl-derived FST, which must
// have Arc and State types defined.
template <class FST>
class CacheArcIterator {
 public:
  using Arc = typename FST::Arc;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using Store = typename FST::Store;
  using State = typename Store::State;
  using Impl = internal::CacheBaseImpl<State, Store>;

  // Pins the state in the cache for the iterator's lifetime via its
  // reference count; released in the destructor.
  CacheArcIterator(Impl *impl, StateId s)
      : state_(impl->GetCacheStore()->GetMutableState(s)), pos_(0) {
    state_->IncrRefCount();
  }

  ~CacheArcIterator() { state_->DecrRefCount(); }

  bool Done() const { return pos_ >= state_->NumArcs(); }

  const Arc &Value() const { return state_->GetArc(pos_); }

  void Next() { ++pos_; }

  size_t Position() const { return pos_; }

  void Reset() { pos_ = 0; }

  void Seek(size_t a) { pos_ = a; }

  // Cached arcs always carry full arc values; flags are fixed.
  constexpr uint32_t Flags() const { return kArcValueFlags; }

  void SetFlags(uint32_t flags, uint32_t mask) {}

 private:
  const State *state_;  // Pinned cached state (non-owning).
  size_t pos_;          // Current arc position.

  CacheArcIterator(const CacheArcIterator &) = delete;
  CacheArcIterator &operator=(const CacheArcIterator &) = delete;
};
// Use this to make a mutable arc iterator for a CacheBaseImpl-derived FST,
// which must have types Arc and Store defined.
template <class FST>
class CacheMutableArcIterator
    : public MutableArcIteratorBase<typename FST::Arc> {
 public:
  using Arc = typename FST::Arc;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using Store = typename FST::Store;
  using State = typename Store::State;
  using Impl = internal::CacheBaseImpl<State, Store>;

  // User must call MutateCheck() in the constructor. Pins the state in the
  // cache for the iterator's lifetime via its reference count.
  CacheMutableArcIterator(Impl *impl, StateId s)
      : pos_(0),
        s_(s),
        impl_(impl),
        state_(impl->GetCacheStore()->GetMutableState(s)) {
    state_->IncrRefCount();
  }

  ~CacheMutableArcIterator() override { state_->DecrRefCount(); }

  bool Done() const final { return pos_ >= state_->NumArcs(); }

  const Arc &Value() const final { return state_->GetArc(pos_); }

  void Next() final { ++pos_; }

  size_t Position() const final { return pos_; }

  void Reset() final { pos_ = 0; }

  void Seek(size_t a) final { pos_ = a; }

  void SetValue(const Arc &arc) final { state_->SetArc(arc, pos_); }

  // Cached arcs always carry full arc values; flags are fixed.
  uint32_t Flags() const final { return kArcValueFlags; }

  void SetFlags(uint32_t, uint32_t) final {}

 private:
  size_t pos_;    // Current arc position.
  StateId s_;     // State being iterated over.
  Impl *impl_;    // Cache implementation (borrowed pointer).
  State *state_;  // Pinned cached state (non-owning).

  CacheMutableArcIterator(const CacheMutableArcIterator &) = delete;
  CacheMutableArcIterator &operator=(const CacheMutableArcIterator &) = delete;
};
// Wraps an existing CacheStore implementation for use with ExpanderFst.
template <class CacheStore>
class ExpanderCacheStore {
 public:
  using State = typename CacheStore::State;
  using Arc = typename CacheStore::Arc;
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;

  explicit ExpanderCacheStore(const CacheOptions &opts = CacheOptions())
      : store_(opts) {}

  // Returns the cached state for s, expanding it on a cache miss.
  template <class Expander>
  State *FindOrExpand(Expander &expander, StateId s) {  // NOLINT
    auto *state = store_.GetMutableState(s);
    if (!state->Flags()) {
      // Cache miss: have the expander fill in arcs and final weight.
      StateBuilder builder(state);
      expander.Expand(s, &builder);
      state->SetFlags(kCacheFlags, kCacheFlags);
      store_.SetArcs(state);
    } else {
      // Cache hit: just refresh the recently-used bit.
      state->SetFlags(kCacheRecent, kCacheRecent);
    }
    return state;
  }

 private:
  CacheStore store_;

  // Adapter handed to Expander::Expand() that forwards arcs and the final
  // weight into the cached state.
  struct StateBuilder {
    State *state;

    explicit StateBuilder(State *state_) : state(state_) {}

    void AddArc(const Arc &arc) { state->PushArc(arc); }

    void SetFinal(Weight weight) { state->SetFinal(std::move(weight)); }
  };
};
} // namespace fst
#endif // FST_CACHE_H_
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/script/fstscript.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// The FST script interface permits users to interact with FSTs without knowing
// their arc type. It does this by mapping compile-time polymorphism (in the
// form of a arc-templated FST types) onto a shared virtual interface. It also
// supports arc extension via a DSO interface. Due to the overhead of virtual
// dispatch and registered function lookups, the script API is somewhat slower
// then library API provided by types like StdVectorFst, but has the advantage
// that it is designed not to crash (and to provide useful debugging
// information) upon common user errors like passing invalid indices or
// attempting comparison of incompatible FSTs. It is used both by the FST
// binaries and the Python extension.
//
// This header includes all of the FST script functionality.
#ifndef FST_SCRIPT_FSTSCRIPT_H_
#define FST_SCRIPT_FSTSCRIPT_H_
// Major classes
#include <fst/script/arciterator-class.h>
#include <fst/script/encodemapper-class.h>
#include <fst/script/fst-class.h>
#include <fst/script/stateiterator-class.h>
#include <fst/script/text-io.h>
#include <fst/script/weight-class.h>
// Flag-to-enum parsers.
#include <fst/script/getters.h>
// Templates like Operation<> and Apply<>.
#include <fst/script/script-impl.h>
// Operations.
#include <fst/script/arcsort.h>
#include <fst/script/closure.h>
#include <fst/script/compile.h>
#include <fst/script/compose.h>
#include <fst/script/concat.h>
#include <fst/script/connect.h>
#include <fst/script/convert.h>
#include <fst/script/decode.h>
#include <fst/script/determinize.h>
#include <fst/script/difference.h>
#include <fst/script/disambiguate.h>
#include <fst/script/draw.h>
#include <fst/script/encode.h>
#include <fst/script/epsnormalize.h>
#include <fst/script/equal.h>
#include <fst/script/equivalent.h>
#include <fst/script/info.h>
#include <fst/script/intersect.h>
#include <fst/script/invert.h>
#include <fst/script/isomorphic.h>
#include <fst/script/map.h>
#include <fst/script/minimize.h>
#include <fst/script/print.h>
#include <fst/script/project.h>
#include <fst/script/prune.h>
#include <fst/script/push.h>
#include <fst/script/randequivalent.h>
#include <fst/script/randgen.h>
#include <fst/script/relabel.h>
#include <fst/script/replace.h>
#include <fst/script/reverse.h>
#include <fst/script/reweight.h>
#include <fst/script/rmepsilon.h>
#include <fst/script/shortest-distance.h>
#include <fst/script/shortest-path.h>
#include <fst/script/synchronize.h>
#include <fst/script/topsort.h>
#include <fst/script/union.h>
#include <fst/script/verify.h>
// This class is necessary because registering each of the operations
// separately overfills the stack, as there's so many of them.
namespace fst {
namespace script {
// Registers every script-level FST operation for the given arc type. The
// registrations are split into two batches because doing them all in a
// single function overfills the stack.
template <class Arc>
class AllFstOperationsRegisterer {
 public:
  AllFstOperationsRegisterer() {
    RegisterBatch1();
    RegisterBatch2();
  }

 private:
  // First half of the operation registrations (arcsort through iterators).
  void RegisterBatch1() {
    REGISTER_FST_OPERATION(ArcSort, Arc, ArcSortArgs);
    REGISTER_FST_OPERATION(Closure, Arc, ClosureArgs);
    REGISTER_FST_OPERATION(CompileFstInternal, Arc, CompileFstArgs);
    REGISTER_FST_OPERATION(Compose, Arc, ComposeArgs);
    REGISTER_FST_OPERATION(Concat, Arc, ConcatArgs1);
    REGISTER_FST_OPERATION(Concat, Arc, ConcatArgs2);
    REGISTER_FST_OPERATION(Connect, Arc, MutableFstClass);
    REGISTER_FST_OPERATION(Convert, Arc, ConvertArgs);
    REGISTER_FST_OPERATION(Decode, Arc, DecodeArgs1);
    REGISTER_FST_OPERATION(Decode, Arc, DecodeArgs2);
    REGISTER_FST_OPERATION(Determinize, Arc, DeterminizeArgs);
    REGISTER_FST_OPERATION(Difference, Arc, DifferenceArgs);
    REGISTER_FST_OPERATION(Disambiguate, Arc, DisambiguateArgs);
    REGISTER_FST_OPERATION(DrawFst, Arc, FstDrawerArgs);
    REGISTER_FST_OPERATION(Encode, Arc, EncodeArgs1);
    REGISTER_FST_OPERATION(Encode, Arc, EncodeArgs2);
    REGISTER_FST_OPERATION(EpsNormalize, Arc, EpsNormalizeArgs);
    REGISTER_FST_OPERATION(Equal, Arc, EqualArgs);
    REGISTER_FST_OPERATION(Equivalent, Arc, EquivalentArgs);
    REGISTER_FST_OPERATION(PrintFstInfo, Arc, InfoArgs);
    REGISTER_FST_OPERATION(GetFstInfo, Arc, GetInfoArgs);
    REGISTER_FST_OPERATION(InitArcIteratorClass, Arc,
                           InitArcIteratorClassArgs);
    REGISTER_FST_OPERATION(InitEncodeMapperClass, Arc,
                           InitEncodeMapperClassArgs);
    REGISTER_FST_OPERATION(InitMutableArcIteratorClass, Arc,
                           InitMutableArcIteratorClassArgs);
    REGISTER_FST_OPERATION(InitStateIteratorClass, Arc,
                           InitStateIteratorClassArgs);
  }
  // Second half of the operation registrations (intersect through verify).
  void RegisterBatch2() {
    REGISTER_FST_OPERATION(Intersect, Arc, IntersectArgs);
    REGISTER_FST_OPERATION(Invert, Arc, MutableFstClass);
    REGISTER_FST_OPERATION(Map, Arc, MapArgs);
    REGISTER_FST_OPERATION(Minimize, Arc, MinimizeArgs);
    REGISTER_FST_OPERATION(PrintFst, Arc, FstPrinterArgs);
    REGISTER_FST_OPERATION(Project, Arc, ProjectArgs);
    REGISTER_FST_OPERATION(Prune, Arc, PruneArgs1);
    REGISTER_FST_OPERATION(Prune, Arc, PruneArgs2);
    REGISTER_FST_OPERATION(Push, Arc, PushArgs1);
    REGISTER_FST_OPERATION(Push, Arc, PushArgs2);
    REGISTER_FST_OPERATION(RandEquivalent, Arc, RandEquivalentArgs);
    REGISTER_FST_OPERATION(RandGen, Arc, RandGenArgs);
    REGISTER_FST_OPERATION(Relabel, Arc, RelabelArgs1);
    REGISTER_FST_OPERATION(Relabel, Arc, RelabelArgs2);
    REGISTER_FST_OPERATION(Replace, Arc, ReplaceArgs);
    REGISTER_FST_OPERATION(Reverse, Arc, ReverseArgs);
    REGISTER_FST_OPERATION(Reweight, Arc, ReweightArgs);
    REGISTER_FST_OPERATION(RmEpsilon, Arc, RmEpsilonArgs);
    REGISTER_FST_OPERATION(ShortestDistance, Arc, ShortestDistanceArgs1);
    REGISTER_FST_OPERATION(ShortestDistance, Arc, ShortestDistanceArgs2);
    REGISTER_FST_OPERATION(ShortestPath, Arc, ShortestPathArgs);
    REGISTER_FST_OPERATION(Synchronize, Arc, SynchronizeArgs);
    REGISTER_FST_OPERATION(TopSort, Arc, TopSortArgs);
    REGISTER_FST_OPERATION(Union, Arc, UnionArgs);
    REGISTER_FST_OPERATION(Verify, Arc, VerifyArgs);
  }
};
} // namespace script
} // namespace fst
// Instantiates the registerer for a given arc type; invoke once per arc type
// at namespace scope in a .cc file.
#define REGISTER_FST_OPERATIONS(Arc) \
  AllFstOperationsRegisterer<Arc> register_all_fst_operations##Arc;
#endif // FST_SCRIPT_FSTSCRIPT_H_
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/script/minimize.h | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#ifndef FST_SCRIPT_MINIMIZE_H_
#define FST_SCRIPT_MINIMIZE_H_
#include <tuple>
#include <fst/minimize.h>
#include <fst/script/fst-class.h>
namespace fst {
namespace script {
using MinimizeArgs = std::tuple<MutableFstClass *, MutableFstClass *, float,
bool>;
// Script-level Minimize: unpacks the argument tuple and forwards to the
// arc-typed fst::Minimize(). The second FST is optional and may be null.
template <class Arc>
void Minimize(MinimizeArgs *args) {
  auto *fst1 = std::get<0>(*args)->GetMutableFst<Arc>();
  MutableFst<Arc> *fst2 = nullptr;
  if (std::get<1>(*args)) fst2 = std::get<1>(*args)->GetMutableFst<Arc>();
  Minimize(fst1, fst2, std::get<2>(*args), std::get<3>(*args));
}
void Minimize(MutableFstClass *ofst1, MutableFstClass *ofst2 = nullptr,
float delta = kShortestDelta, bool allow_nondet = false);
} // namespace script
} // namespace fst
#endif // FST_SCRIPT_MINIMIZE_H_
| 0 |
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src | coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/bin/fstdeterminize.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/flags.h>
#include <fst/fst.h>
#include <fst/weight.h>
// Command-line flags for the fstdeterminize binary.
DEFINE_double(delta, fst::kDelta, "Comparison/quantization delta");
DEFINE_string(weight, "", "Weight threshold");
DEFINE_int64(nstate, fst::kNoStateId, "State number threshold");
DEFINE_int64(subsequential_label, 0,
             "Input label of arc corresponding to residual final output when"
             " producing a subsequential transducer");
DEFINE_string(det_type, "functional",
              "Type of determinization: \"functional\", "
              "\"nonfunctional\", \"disambiguate\"");
DEFINE_bool(increment_subsequential_label, false,
            "Increment subsequential_label to obtain distinct labels for "
            " subsequential arcs at a given state");

// Implemented in a separate translation unit so the logic can be shared.
int fstdeterminize_main(int argc, char **argv);

// Thin entry point: all work is delegated to fstdeterminize_main().
int main(int argc, char **argv) { return fstdeterminize_main(argc, argv); }
| 0 |
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src | coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/script/stateiterator-class.cc | // See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
#include <fst/script/script-impl.h>
#include <fst/script/stateiterator-class.h>
namespace fst {
namespace script {
// Dispatches on the FST's arc type to the registered InitStateIteratorClass
// operation, which sets impl_ to a typed state iterator for the FST.
StateIteratorClass::StateIteratorClass(const FstClass &fst) : impl_(nullptr) {
  InitStateIteratorClassArgs args(fst, this);
  Apply<Operation<InitStateIteratorClassArgs>>("InitStateIteratorClass",
                                               fst.ArcType(), &args);
}
// Registers the operation for the standard arc types so that script-level
// calls can dispatch on the FST's arc type at runtime.
REGISTER_FST_OPERATION(InitStateIteratorClass, StdArc,
                       InitStateIteratorClassArgs);
REGISTER_FST_OPERATION(InitStateIteratorClass, LogArc,
                       InitStateIteratorClassArgs);
REGISTER_FST_OPERATION(InitStateIteratorClass, Log64Arc,
                       InitStateIteratorClassArgs);
} // namespace script
} // namespace fst
| 0 |