Fix typos (microsoft#2212)
taehoonlee authored and cha-zhang committed Sep 17, 2017
1 parent cd6748f commit 75fa318
Showing 11 changed files with 12 additions and 12 deletions.
@@ -1795,7 +1795,7 @@ autoAdjustMinibatch
Default value is false.
Adaptive minibatch sizing will begin on epochs starting after user minibatch
sizes explicitly specified are complete.
-For example if the userspecifed minibatchSize=256:1024, then 256 and 1024are
+For example if the userspecified minibatchSize=256:1024, then 256 and 1024are
used in the first 2 Epochs and adaptive minibatchsizing is used afterwards

\end_layout
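The documentation hunk above describes how an explicit minibatchSize schedule such as 256:1024 is consumed epoch by epoch before adaptive sizing takes over. A minimal Python sketch of that scheduling rule (illustrative only; the function and variable names below are hypothetical, not CNTK's implementation):

def minibatch_size_for_epoch(explicit_sizes, epoch, adaptive_size):
    """Return the minibatch size for a zero-based epoch index.

    explicit_sizes: user-specified schedule, e.g. [256, 1024].
    adaptive_size:  size chosen by the automatic search once the
                    explicit entries are exhausted.
    """
    if epoch < len(explicit_sizes):
        return explicit_sizes[epoch]   # the first epochs use the explicit sizes
    return adaptive_size               # afterwards adaptive sizing applies

# With minibatchSize=256:1024, epochs 1 and 2 use 256 and 1024;
# later epochs fall back to whatever the adaptive search picks.
print([minibatch_size_for_epoch([256, 1024], e, adaptive_size=2048)
       for e in range(4)])   # -> [256, 1024, 2048, 2048]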
2 changes: 1 addition & 1 deletion Examples/Speech/Miscellaneous/AMI/scripts/train_nnet.sh
@@ -137,7 +137,7 @@ cn_command="$cn_command $cntk_train_opts DeviceNumber=$device"
cn_command="$cn_command command=TrainModel"
$cmd $parallel_opts JOB=1:1 $dir/log/cntk.train.JOB.log $cn_command || exit 1;

echo "$0 successfuly finished.. $dir"
echo "$0 successfully finished.. $dir"

sleep 3
exit 0
2 changes: 1 addition & 1 deletion Examples/Speech/Miscellaneous/AMI/train_cntk2.sh
@@ -70,7 +70,7 @@ EOF
## training command ##
$cn_gpu configFile=${expdir}/Base.config configFile=${expdir}/CNTK2.cntk DeviceNumber=0 action=TrainDNN ndlfile=$ndlfile

echo "$0 successfuly finished.. $dir"
echo "$0 successfully finished.. $dir"

fi

2 changes: 1 addition & 1 deletion Source/ComputationNetworkLib/ComputationNetwork.cpp
@@ -812,7 +812,7 @@ void ComputationNetwork::DescribeNetworkUsingDot(list<ComputationArc>& arcs,
fstream << FormSpecialNodes(dotcfg.m_featuresStyle, m_featureNodes);
// labels
fstream << FormSpecialNodes(dotcfg.m_labelsStyle, m_labelNodes);
-// critera
+// criteria
fstream << FormSpecialNodes(dotcfg.m_CriteriaStyle, m_criterionNodes);
// pre-compute nodes
fstream << FormSpecialNodes(dotcfg.m_PrecomputingNodeStyle, preComputedNodes);
2 changes: 1 addition & 1 deletion Source/ComputationNetworkLib/ComputationNetwork.h
@@ -795,7 +795,7 @@ class ComputationNetwork :
// determine nodes to evaluate
std::vector<ComputationNodeBasePtr> evalNodes;

-set<ComputationNodeBasePtr> criteriaLogged; // (keeps track ot duplicates to avoid we don't double-log critera)
+set<ComputationNodeBasePtr> criteriaLogged; // (keeps track ot duplicates to avoid we don't double-log criteria)
if (evalNodeNames.size() == 0)
{
fprintf(stderr, "evalNodeNames are not specified, using all the default evalnodes and training criterion nodes.\n");
4 changes: 2 additions & 2 deletions Source/ComputationNetworkLib/SpecialPurposeNodes.h
@@ -458,7 +458,7 @@ class SequenceWithSoftmaxNode : public ComputationNodeNonLooping<ElemType>, publ
{
}

-// compute gradients to input observations, the weights to the observations, and the class log posterior probabilites
+// compute gradients to input observations, the weights to the observations, and the class log posterior probabilities
virtual void BackpropToNonLooping(size_t inputIndex) override
{
// auto t_start_time = Timer::MilliSecondElapsed();
@@ -795,7 +795,7 @@ class ForwardBackwardNode : public ComputationNodeNonLooping<ElemType>, public
AttachInputsFromConfig(configp, this->GetExpectedNumInputs());
}

-// Compute gradients to input observations, the weights to the observations, and the class log posterior probabilites
+// Compute gradients to input observations, the weights to the observations, and the class log posterior probabilities
virtual void BackpropToNonLooping(size_t inputIndex) override
{
// Left node must be a scalar
2 changes: 1 addition & 1 deletion Source/ComputationNetworkLib/TrainingNodes.h
@@ -1524,7 +1524,7 @@ class ClassBasedCrossEntropyWithSoftmaxNode : public ComputationNodeNonLooping /
return sz;
}

-// compute gradients to input observations, the weights to the observations, and the class log posterior probabilites
+// compute gradients to input observations, the weights to the observations, and the class log posterior probabilities
virtual void BackpropToNonLooping(size_t inputIndex) override
{
// this should never be called for input[0], which is controlled through learningRateMultiplier == 0
2 changes: 1 addition & 1 deletion Source/Math/GPUTensor.cu
@@ -264,7 +264,7 @@ struct TensorOps
};

// ----------------------------------------------------------------------------
-// Function to update an aggregate value for the specifed reduction operation
+// Function to update an aggregate value for the specified reduction operation
// ----------------------------------------------------------------------------

template <typename ElemType> __device__ ElemType AggregateNeutralValue(ElementWiseOperator op)
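For context on the hunk above: AggregateNeutralValue supplies the identity element that a reduction accumulator starts from, so initialisation never distorts the result. A rough Python analogue of that idea (an assumed mapping for the common reduction ops, not the CUDA code itself):

import math

# Identity elements for common reductions: combining any x with the
# neutral value leaves x unchanged.
NEUTRAL = {
    "sum":  0.0,        # x + 0 == x
    "prod": 1.0,        # x * 1 == x
    "max": -math.inf,   # max(x, -inf) == x
    "min":  math.inf,   # min(x, +inf) == x
}

def reduce_values(op, values):
    combine = {"sum": lambda a, x: a + x,
               "prod": lambda a, x: a * x,
               "max": max,
               "min": min}[op]
    acc = NEUTRAL[op]           # start from the neutral value for this op
    for x in values:
        acc = combine(acc, x)
    return acc

print(reduce_values("max", [3.0, -1.0, 2.5]))   # -> 3.0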
2 changes: 1 addition & 1 deletion Source/Readers/ReaderLib/IndexBuilder.cpp
@@ -164,7 +164,7 @@ TextInputIndexBuilder::TextInputIndexBuilder(const FileWrapper& input)

/*virtual*/ wstring TextInputIndexBuilder::GetCacheFilename() /*override*/
{
-// What follows are all the options that affect the outcome of indexing (i.e., specifing a 'main' stream
+// What follows are all the options that affect the outcome of indexing (i.e., specifying a 'main' stream
// using the definesMBsize flag will affect the sequence length in terms of number of samples).
// We could compute a (SHA1) hash of all these options and add it instead to the filename (+ embed the
// values themselves into the cache header), but given that there're only a few of them, encoding them
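The comment above weighs hashing the indexing options into the cache filename against encoding the few option values directly. A hedged Python sketch of the hashing alternative it mentions (the helper and option names here are hypothetical, for illustration only):

import hashlib

def cache_filename(input_path, options):
    """Fold the options that affect indexing into a short SHA1 suffix."""
    digest = hashlib.sha1(repr(sorted(options.items())).encode("utf-8")).hexdigest()
    return "{}.{}.cache".format(input_path, digest[:8])

# Any option that changes the index (e.g. which stream defines the
# minibatch size) must be reflected in the cache name.
print(cache_filename("train.ctf", {"main_stream": "features"}))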
2 changes: 1 addition & 1 deletion Tests/EndToEndTests/TestDriver.py
@@ -692,7 +692,7 @@ def runCommand(args):
pyPaths['py35'] = convertPythonPath(args.py35_paths)
if args.py36_paths:
pyPaths['py36'] = convertPythonPath(args.py36_paths)
-# If no Python was explicitly specifed, go against current.
+# If no Python was explicitly specified, go against current.
if not pyPaths:
pyPaths['py'] = ''

2 changes: 1 addition & 1 deletion bindings/python/cntk/ops/__init__.py
@@ -2757,7 +2757,7 @@ def random_sample_inclusion_frequency(
seed = SentinelValueForAutoSelectRandomSeed,
name=''):
'''
-For weighted sampling with the specifed sample size (`num_samples`)
+For weighted sampling with the specified sample size (`num_samples`)
this operation computes the expected number of occurrences of each class
in the sampled set. In case of sampling without replacement
the result is only an estimate which might be quite rough in the
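On the docstring above: for weighted sampling with replacement, the expected number of occurrences of class i across num_samples draws is num_samples * w_i / sum(w); without replacement, as the docstring notes, the result is only an approximation. A small NumPy sketch of the with-replacement case (illustrative, not the CNTK operator itself):

import numpy as np

def expected_inclusion_frequency(weights, num_samples):
    """Expected per-class occurrence counts for weighted sampling
    with replacement: num_samples * w_i / sum(w)."""
    w = np.asarray(weights, dtype=float)
    return num_samples * w / w.sum()

print(expected_inclusion_frequency([1.0, 2.0, 7.0], num_samples=100))
# -> [10. 20. 70.]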
