
Commit 79a258f

Chillee authored and facebook-github-bot committed

s/foward/forward/g (pytorch#58497)

Summary: Annoying typo. Prompted by these profiling results: pytorch#56419 (comment)

Pull Request resolved: pytorch#58497
Reviewed By: malfet
Differential Revision: D28521081
Pulled By: Chillee
fbshipit-source-id: ab91a2e167dd7d3387fd56106a6cff81f7a32f10
1 parent ccad77a commit 79a258f
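For context, the title is a sed-style substitution. A rough Python equivalent of the repo-wide rename (the file filter and repo-root path are illustrative; judging from renames like ModuleDirectFowardSubmodCall becoming ModuleDirectforwardSubmodCall in the diffs below, the match was evidently case-insensitive with a fixed lowercase replacement):

import pathlib
import re

# Replace every occurrence of "foward" with "forward" across the tree,
# mirroring the commit title s/foward/forward/g. Run from the repo root.
for path in pathlib.Path(".").rglob("*"):
    # Illustrative filter: only the text file types touched by this commit.
    if not path.is_file() or path.suffix not in {".py", ".cpp", ".md", ".rst"}:
        continue
    text = path.read_text(encoding="utf-8")
    fixed = re.sub("foward", "forward", text, flags=re.IGNORECASE)
    if fixed != text:
        path.write_text(fixed, encoding="utf-8")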

File tree

10 files changed: +14 -14 lines changed


aten/src/ATen/native/mkl/SpectralOps.cpp

+1 -1

@@ -389,7 +389,7 @@ Tensor _fft_c2r_mkl(const Tensor& self, IntArrayRef dim, int64_t normalization,
   auto input = self;
   if (dim.size() > 1) {
     auto c2c_dims = dim.slice(0, dim.size() - 1);
-    input = _fft_c2c_mkl(self, c2c_dims, normalization, /*foward=*/false);
+    input = _fft_c2c_mkl(self, c2c_dims, normalization, /*forward=*/false);
     dim = dim.slice(dim.size() - 1);
   }

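The renamed flag sits in the multi-dimensional c2r fallback, which runs an inverse c2c transform over all but the last dimension and then a single c2r transform over the last one. A minimal NumPy sketch of the same decomposition (NumPy standing in for the MKL path):

import numpy as np

# Hermitian-symmetric input, as produced by a forward r2c FFT.
x = np.random.randn(4, 5)
X = np.fft.rfftn(x, axes=(0, 1))

# Direct multi-dimensional c2r inverse.
direct = np.fft.irfftn(X, s=x.shape, axes=(0, 1))

# Decomposed form mirroring _fft_c2r_mkl: inverse c2c over the leading
# axes, then one c2r transform over the last axis.
step1 = np.fft.ifft(X, axis=0)
decomposed = np.fft.irfft(step1, n=x.shape[1], axis=1)

assert np.allclose(direct, decomposed)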
benchmarks/distributed/rpc/parameter_server/trainers/TrainerBase.py

+1 -1

@@ -9,7 +9,7 @@ class TrainerBase(ABC):

     BATCH_LEVEL_METRIC = "batch_level_metric"
     BATCH_ALL = "batch_all"
-    FORWARD_METRIC = "foward_metric"
+    FORWARD_METRIC = "forward_metric"
     FORWARD_PASS = "forward_pass"
     BACKWARD_METRIC = "backward_metric"
     BACKWARD = "backward"

benchmarks/tensorexpr/nnc.png

53 KB

docs/source/fx.rst

+1 -1

@@ -474,7 +474,7 @@ Debugging the Generated Code

 Because FX generates the ``forward()`` function on :class:`GraphModule`\s, using
 traditional debugging techniques like ``print`` statements or ``pdb`` is
-not as straightfoward. Luckily, we have several techniques we can use
+not as straightforward. Luckily, we have several techniques we can use
 for debugging the generated code.

 Use ``pdb``
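Since FX emits the forward() body as ordinary Python source, reading that source is often the first debugging step before reaching for pdb; a minimal sketch (the traced module is illustrative):

import torch
import torch.fx

class M(torch.nn.Module):
    def forward(self, x):
        return torch.relu(x) + 1.0

gm = torch.fx.symbolic_trace(M())

# gm.code holds the generated forward() as plain Python source.
print(gm.code)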

test/jit/test_hooks.py

+2 -2

@@ -5,7 +5,7 @@

 import torch
 from jit.test_hooks_modules import (
-    ModuleDirectFowardSubmodCall, ModuleForwardSingleInput,
+    ModuleDirectforwardSubmodCall, ModuleForwardSingleInput,
     ModuleForwardTupleInput, create_forward_tuple_input,
     create_module_forward_multiple_inputs, create_module_forward_single_input,
     create_module_hook_return_nothing,

@@ -184,7 +184,7 @@ def forward_hook(self, input: Tuple[str], output: str):
         self.assertNotEqual(m_scripted("a"), m_scripted.forward("a"))

     def test_submodule_direct_forward_invocation(self):
-        m_submod_forward_call = ModuleDirectFowardSubmodCall(
+        m_submod_forward_call = ModuleDirectforwardSubmodCall(
             "outer_mod_name", "inner_mod_name"
         )
         m_submod_call = ModuleForwardSingleInput("outer_mod_name", "inner_mod_name")
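The assertNotEqual above reflects the general rule that hooks fire through Module.__call__ but not through a direct forward() call. A minimal eager-mode sketch of that distinction (module and hook are illustrative):

import torch

class Echo(torch.nn.Module):
    def forward(self, x: str) -> str:
        return x

m = Echo()

# A forward hook that returns a non-None value replaces the output, but
# only for calls routed through __call__, i.e. m("a"), not m.forward("a").
m.register_forward_hook(lambda mod, inp, out: out + "_hooked")

assert m("a") == "a_hooked"
assert m.forward("a") == "a"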

test/jit/test_hooks_modules.py

+1 -1

@@ -46,7 +46,7 @@ def forward(self, input: str):
         return self.submodule(input)


-class ModuleDirectFowardSubmodCall(torch.nn.Module):
+class ModuleDirectforwardSubmodCall(torch.nn.Module):
     def __init__(self, name: str, submodule_name: str):
         super().__init__()
         self.name = name

test/test_mobile_optimizer.py

+3 -3

@@ -254,12 +254,12 @@ def foo(self, x):
         FileCheck().check_count("prim::CallMethod[name=\"forward\"]", 2, exactly=True) \
             .run(bn_no_forward_scripted_module.foo.graph)

-        bn_fold_no_foward_scripted_module = optimize_for_mobile(bn_no_forward_scripted_module, preserved_methods=['foo'])
-        self.assertEqual(len(torch.jit.export_opnames(bn_fold_no_foward_scripted_module)), 1)
+        bn_fold_no_forward_scripted_module = optimize_for_mobile(bn_no_forward_scripted_module, preserved_methods=['foo'])
+        self.assertEqual(len(torch.jit.export_opnames(bn_fold_no_forward_scripted_module)), 1)
         bn_input = torch.rand(1, 1, 6, 6)
         torch.testing.assert_allclose(
             bn_no_forward_scripted_module.foo(bn_input),
-            bn_fold_no_foward_scripted_module.foo(bn_input),
+            bn_fold_no_forward_scripted_module.foo(bn_input),
             rtol=1e-2,
             atol=1e-3)

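This test exercises the preserved_methods argument of optimize_for_mobile, which keeps methods other than forward() callable on the optimized module. A minimal usage sketch (module and method names are illustrative):

import torch
from torch.utils.mobile_optimizer import optimize_for_mobile

class MyModule(torch.nn.Module):
    def forward(self, x):
        return x * 2

    @torch.jit.export
    def foo(self, x):
        return x + 1

scripted = torch.jit.script(MyModule())

# Listing 'foo' in preserved_methods keeps it alive through the
# mobile optimization passes.
optimized = optimize_for_mobile(scripted, preserved_methods=['foo'])
print(optimized.foo(torch.ones(2)))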
torch/csrc/jit/docs/serialization.md

+1 -1

@@ -374,7 +374,7 @@ object around in C++, all its code will stay around and methods will be
 invokable.

 **`Module`**: A view over a `ClassType` and the `Object` that holds its state.
-Also responsible for turning unqualified names (e.g. `foward()`) into
+Also responsible for turning unqualified names (e.g. `forward()`) into
 qualified ones for lookup in the owning `CompilationUnit` (e.g.
 `__torch__.MyModule.forward`). Owns the `Object`, which transitively owns the
 `CompilationUnit`.
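One place this qualified name is visible from Python is the type of the graph's self argument after scripting; a small sketch (class name illustrative):

import torch

class MyModule(torch.nn.Module):
    def forward(self, x):
        return x + 1

scripted = torch.jit.script(MyModule())

# The printed graph types %self with the qualified class name
# (something like __torch__.MyModule), under which methods such as
# forward are registered in the owning CompilationUnit.
print(scripted.graph)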

torch/csrc/jit/runtime/graph_executor.cpp

+2 -2

@@ -370,8 +370,8 @@ struct DifferentiableGraphBackward : public autograd::Node {
 // to the output Variables if present.
 struct DifferentiableGraphOp {
   DifferentiableGraphOp(Gradient grad)
-      : f_ptr(std::make_shared<GraphExecutor>(grad.f, "<foward op>")),
-        legacy_f(grad.f, "<foward op>"),
+      : f_ptr(std::make_shared<GraphExecutor>(grad.f, "<forward op>")),
+        legacy_f(grad.f, "<forward op>"),
         grad(std::move(grad)),
         grad_executor(this->grad.df, "<backward op>"),
         num_inputs(this->grad.f->inputs().size()),

torch/testing/_internal/distributed/nn/api/remote_module_test.py

+2 -2

@@ -496,7 +496,7 @@ def test_send_remote_module_over_the_wire(self):
         self.assertFalse(attrs["is_scriptable"])

         # Test the installed methods on worker1's can be initiated by worker2 over RPC layer.
-        # NOTE: In practice a remote module should be directly stored on the worker that runs ``forward``` or ``foward_async``,
+        # NOTE: In practice a remote module should be directly stored on the worker that runs ``forward``` or ``forward_async``,
         # not have another worker to initiate forward over the RPC layer.
         args = (torch.ones(1), 2, "3")
         ret1 = rpc.rpc_sync(dst_worker2_name, remote_forward, (remote_module, args))

@@ -537,7 +537,7 @@ def test_send_remote_module_over_the_wire_script(self):
         self.assertFalse(attrs["is_scriptable"])

         # Test the installed methods on worker1's can be initiated by worker2 over RPC layer.
-        # NOTE: In practice a remote module should be directly stored on the worker that runs ``forward``` or ``foward_async``,
+        # NOTE: In practice a remote module should be directly stored on the worker that runs ``forward``` or ``forward_async``,
         # not have another worker to initiate forward over the RPC layer.
         args = (torch.ones(1), 2, "3")
         ret1 = rpc.rpc_sync(dst_worker2_name, remote_forward, (remote_module, args))

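The NOTE in both hunks describes the intended pattern: construct the RemoteModule on the worker that owns it and call forward or forward_async on it directly, rather than initiating forward from a third worker over RPC. A rough sketch, assuming an already-initialized RPC group in which "worker1" owns the module (worker name and module choice are illustrative):

import torch
from torch.distributed.nn.api.remote_module import RemoteModule

# The module lives on worker1; forward/forward_async execute there
# directly instead of being initiated via another worker's RPC call.
remote_linear = RemoteModule("worker1/cpu", torch.nn.Linear, args=(20, 30))

out = remote_linear.forward(torch.randn(128, 20))        # synchronous
fut = remote_linear.forward_async(torch.randn(128, 20))  # returns a Future
out2 = fut.wait()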