Typo #18684

Merged (10 commits, Jun 28, 2018)
4 changes: 2 additions & 2 deletions src/binder/assembly.cpp
@@ -20,7 +20,7 @@ namespace BINDER_SPACE
{
namespace
{
-BOOL IsPlatformArchicture(PEKIND kArchitecture)
+BOOL IsPlatformArchitecture(PEKIND kArchitecture)
{
return ((kArchitecture != peMSIL) && (kArchitecture != peNone));
}
@@ -279,7 +279,7 @@ namespace BINDER_SPACE
/* static */
BOOL Assembly::IsValidArchitecture(PEKIND kArchitecture)
{
-if (!IsPlatformArchicture(kArchitecture))
+if (!IsPlatformArchitecture(kArchitecture))
return TRUE;

return (kArchitecture == GetSystemArchitecture());
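Aside: a standalone sketch of how the two checks in this file compose. The PEKIND enum, its values, and the GetSystemArchitecture stub below are mocked locally for illustration; only the two predicate bodies mirror the hunk above.

    // Illustrative mock, not CoreCLR's headers: peMSIL/peNone are architecture-
    // neutral, everything else must match the machine the binder runs on.
    #include <cstdio>

    enum PEKIND { peNone, peMSIL, peI386, peAMD64, peARM, peARM64 };

    static PEKIND GetSystemArchitecture() { return peAMD64; } // assumption: x64 host

    static bool IsPlatformArchitecture(PEKIND k) { return k != peMSIL && k != peNone; }

    static bool IsValidArchitecture(PEKIND k)
    {
        if (!IsPlatformArchitecture(k))
            return true;                     // MSIL / arch-neutral binds anywhere
        return k == GetSystemArchitecture(); // platform-specific images must match
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    IsValidArchitecture(peMSIL),   // 1: neutral
                    IsValidArchitecture(peAMD64),  // 1: matches host
                    IsValidArchitecture(peI386));  // 0: mismatch
    }
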
6 changes: 3 additions & 3 deletions src/debug/ee/debugger.cpp
@@ -331,7 +331,7 @@ void Debugger::DoNotCallDirectlyPrivateLock(void)
{
WRAPPER_NO_CONTRACT;

-LOG((LF_CORDB,LL_INFO10000, "D::Lock aquire attempt by 0x%x\n",
+LOG((LF_CORDB,LL_INFO10000, "D::Lock acquire attempt by 0x%x\n",
GetCurrentThreadId()));

// Debugger lock is larger than both Controller & debugger-data locks.
@@ -426,7 +426,7 @@ void Debugger::DoNotCallDirectlyPrivateLock(void)

if (m_mutexCount == 1)
{
-LOG((LF_CORDB,LL_INFO10000, "D::Lock aquired by 0x%x\n", m_mutexOwner));
+LOG((LF_CORDB,LL_INFO10000, "D::Lock acquired by 0x%x\n", m_mutexOwner));
}
#endif

@@ -4990,7 +4990,7 @@ HRESULT Debugger::MapAndBindFunctionPatches(DebuggerJitInfo *djiNew,
// The DJI gets deleted as part of the Unbind/Rebind process in MovedCode.
// This is to signal that we should not skip here.
// <NICE> under exactly what scenarios (EnC, code pitching etc.) will this apply?... </NICE>
-// <NICE> can't we be a little clearer about why we don't want to bind the patch in this arcance situation?</NICE>
+// <NICE> can't we be a little clearer about why we don't want to bind the patch in this arcane situation?</NICE>
if (dcp->HasDJI() && !dcp->IsBreakpointPatch() && !dcp->IsStepperPatch())
{
LOG((LF_CORDB, LL_INFO10000, "Neither stepper nor BP but we have valid a DJI (i.e. the DJI hasn't been deleted as part of the Unbind/MovedCode/Rebind mess)! - getting next patch!\n"));
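The "Debugger lock is larger than both Controller & debugger-data locks" comment in the first hunk describes a lock hierarchy: a thread may only acquire locks strictly smaller than any lock it already holds, which rules out deadlock by construction. A minimal sketch of that discipline follows; the levels, names, and guard class are illustrative, not CoreCLR's actual Crst machinery.

    #include <cassert>
    #include <mutex>

    // Illustrative lock levels: a thread must take higher-numbered (larger)
    // locks before lower-numbered ones, never the other way around.
    enum : int { LVL_DEBUGGER = 30, LVL_CONTROLLER = 20, LVL_DEBUGGER_DATA = 10 };

    thread_local int g_lowestHeldLevel = 1 << 30; // sentinel: nothing held

    class OrderedLockGuard
    {
        std::mutex& m_mutex;
        int         m_savedLevel;
    public:
        OrderedLockGuard(std::mutex& mutex, int level)
            : m_mutex(mutex), m_savedLevel(g_lowestHeldLevel)
        {
            // Hierarchy rule: only locks strictly below everything held may be taken.
            assert(level < g_lowestHeldLevel && "lock-order violation");
            m_mutex.lock();
            g_lowestHeldLevel = level;
        }
        ~OrderedLockGuard()
        {
            m_mutex.unlock();
            g_lowestHeldLevel = m_savedLevel; // restore on scoped release
        }
    };

    std::mutex g_debuggerLock, g_controllerLock;

    void LockedWork()
    {
        OrderedLockGuard outer(g_debuggerLock, LVL_DEBUGGER);     // larger lock first
        OrderedLockGuard inner(g_controllerLock, LVL_CONTROLLER); // then the smaller
        // Acquiring them in the opposite order would fire the assert.
    }
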
2 changes: 1 addition & 1 deletion src/jit/lclvars.cpp
@@ -5237,7 +5237,7 @@ int Compiler::lvaAssignVirtualFrameOffsetToArg(unsigned lclNum,
if (varDsc->lvType == TYP_STRUCT && varDsc->lvOtherArgReg >= MAX_REG_ARG && varDsc->lvOtherArgReg != REG_NA)
{
// This is a split struct. It will account for an extra (8 bytes)
-// of allignment.
+// of alignment.
varDsc->lvStkOffs += TARGET_POINTER_SIZE;
argOffs += TARGET_POINTER_SIZE;
}
2 changes: 1 addition & 1 deletion src/jit/morph.cpp
@@ -4626,7 +4626,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry
{
assert(arg->OperGet() == GT_LCL_VAR);

-// We need to construct a `GT_OBJ` node for the argmuent,
+// We need to construct a `GT_OBJ` node for the argument,
// so we need to get the address of the lclVar.
lcl = arg->AsLclVarCommon();
}
2 changes: 1 addition & 1 deletion src/pal/src/map/map.cpp
@@ -1661,7 +1661,7 @@ static INT MAPFileMapToMmapFlags( DWORD flags )
else if ( FILE_MAP_WRITE == flags )
{
TRACE( "FILE_MAP_WRITE\n" );
-/* The limitation of x86 archetecture
+/* The limitation of x86 architecture
means you cant have writable but not readable
page. In Windows maps of FILE_MAP_WRITE can still be
read from. */
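For context on the comment in this hunk: x86 page protection cannot express write-without-read, and Windows guarantees a FILE_MAP_WRITE view is readable anyway, so the PAL asks mmap for both protections. A simplified sketch of that flag translation; the Win32 constants are spelled out locally, and the real function in map.cpp handles more combinations.

    #include <sys/mman.h>

    // Simplified sketch of MAPFileMapToMmapFlags' intent for the write case.
    static int FileMapToProt(unsigned int flags)
    {
        const unsigned int MY_FILE_MAP_WRITE = 0x0002; // Win32 FILE_MAP_WRITE
        const unsigned int MY_FILE_MAP_READ  = 0x0004; // Win32 FILE_MAP_READ

        if (flags == MY_FILE_MAP_WRITE)
        {
            // Write implies read: x86 has no writable-but-unreadable pages, and
            // Windows lets FILE_MAP_WRITE views be read from, so match that.
            return PROT_READ | PROT_WRITE;
        }
        if (flags == MY_FILE_MAP_READ)
            return PROT_READ;
        return PROT_NONE;
    }
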
2 changes: 1 addition & 1 deletion src/vm/gccover.cpp
@@ -913,7 +913,7 @@ bool replaceInterruptibleRangesWithGcStressInstr (UINT32 startOffset, UINT32 sto
// We cannot insert GCStress instruction at this call
// For arm64 & arm (R2R) call to jithelpers happens via a stub.
// For other architectures call does not happen via stub.
-// For other architecures we can get the target directly by calling getTargetOfCall().
+// For other architectures we can get the target directly by calling getTargetOfCall().
// This is not the case for arm64/arm so need to decode the stub
// instruction to find the actual jithelper target.
// For other architecture we detect call to JIT_RareDisableHelper
2 changes: 1 addition & 1 deletion src/vm/methodtablebuilder.cpp
@@ -7451,7 +7451,7 @@ MethodTableBuilder::PlaceInterfaceMethods()
else
{
// Iterate through the methods on the interface, and if they have a slot which was filled in
-// on an equivalent interface inherited from the parent fill in the approrpriate slot.
+// on an equivalent interface inherited from the parent fill in the appropriate slot.
// This code path is only used when there is an implicit implementation of an interface
// that was not implemented on a parent type, but there was an equivalent interface implemented
// on a parent type.
2 changes: 1 addition & 1 deletion src/vm/reflectioninvocation.cpp
@@ -2272,7 +2272,7 @@ void ExecuteCodeWithGuaranteedCleanupHelper (ECWGC_GC *gc)
// ExecuteCodeWithGuaranteedCleanup ensures that we will call the backout code delegate even if an SO occurs. We do this by calling the
// try delegate from within an EX_TRY/EX_CATCH block that will catch any thrown exceptions and thus cause the stack to be unwound. This
// guarantees that the backout delegate is called with at least DEFAULT_ENTRY_PROBE_SIZE pages of stack. After the backout delegate is called,
-// we re-raise any exceptions that occurred inside the try delegate. Note that any CER that uses large or arbitraty amounts of stack in
+// we re-raise any exceptions that occurred inside the try delegate. Note that any CER that uses large or arbitrary amounts of stack in
// it's try block must use ExecuteCodeWithGuaranteedCleanup.
//
// ExecuteCodeWithGuaranteedCleanup also guarantees that the backount code will be run before any filters higher up on the stack. This
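The block comment in this hunk describes a catch-everything, clean-up, then re-raise shape. A standalone C++ sketch of that shape, with std::exception_ptr standing in for the CLR's EX_TRY/EX_CATCH macros and stack probes, which this deliberately does not reproduce:

    #include <exception>
    #include <functional>

    // Sketch: run tryCode, guarantee cleanupCode runs even if tryCode throws
    // (the catch unwinds the stack first, freeing pages for the backout), and
    // only afterwards re-raise the original exception.
    void ExecuteWithGuaranteedCleanup(const std::function<void()>& tryCode,
                                      const std::function<void(bool)>& cleanupCode)
    {
        std::exception_ptr pending;
        try
        {
            tryCode();
        }
        catch (...)
        {
            pending = std::current_exception(); // stack is unwound at this point
        }

        cleanupCode(pending != nullptr);        // backout runs before outer filters

        if (pending)
            std::rethrow_exception(pending);    // surface the original failure
    }
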
2 changes: 1 addition & 1 deletion src/vm/threads.cpp
@@ -6777,7 +6777,7 @@ HRESULT Thread::CLRSetThreadStackGuarantee(SetThreadStackGuaranteeScope fScope)
//
// -plus we might need some more for debugger EH dispatch, Watson, etc...
// -also need to take into account that we can lose up to 1 page of the guard region
-// -additionally, we need to provide some region to hosts to allow for lock aquisition in a hosted scenario
+// -additionally, we need to provide some region to hosts to allow for lock acquisition in a hosted scenario
//
EXTRA_PAGES = 3;
INDEBUG(EXTRA_PAGES += 1);
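The page arithmetic above ends up in a Win32 SetThreadStackGuarantee call, which keeps extra committed pages below the guard page so stack-overflow handling has room to run. A hedged sketch of the call shape; the EXTRA_PAGES value mirrors this hunk, everything else is simplified relative to what threads.cpp actually computes.

    #ifdef _WIN32
    #include <windows.h>

    bool SetStackGuaranteeSketch()
    {
        SYSTEM_INFO si;
        GetSystemInfo(&si); // si.dwPageSize is typically 4 KiB on x86/x64

        ULONG extraPages = 3; // EXTRA_PAGES from the hunk above
    #ifdef _DEBUG
        extraPages += 1;      // debug builds burn more stack per frame
    #endif

        // In/out parameter: bytes of stack guaranteed to the overflow handler.
        ULONG guarantee = extraPages * si.dwPageSize;
        return SetThreadStackGuarantee(&guarantee) != FALSE;
    }
    #endif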