Commit 28f6ae2

cyyever authored and pytorchmergebot committed

[9/N] Replace c10::optional with std::optional (pytorch#130674)

Follows pytorch#130509
Pull Request resolved: pytorch#130674
Approved by: https://github.com/Skylion007

1 parent: 774ca93
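Note on why this is a pure rename: by this point in the series, c10::optional is understood to be a thin alias for std::optional, so the substitution changes spelling only, not behavior. A minimal sketch of that aliasing pattern (the alias block below illustrates the general shape of such a migration shim, and is not copied from c10/util/Optional.h):

#include <optional>
#include <type_traits>

namespace c10 {
// Legacy spellings kept as transparent aliases during the migration.
using std::make_optional;
using std::nullopt;
using std::nullopt_t;
using std::optional;
} // namespace c10

int main() {
  c10::optional<int> a = c10::nullopt; // old spelling
  std::optional<int> b = std::nullopt; // new spelling; the same type
  static_assert(std::is_same_v<decltype(a), decltype(b)>);
  return (a.has_value() || b.has_value()) ? 1 : 0;
}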


49 files changed: +230 -230 lines

android/pytorch_android/src/main/cpp/pytorch_jni_jit.cpp (+1 -1)

@@ -119,7 +119,7 @@ class PytorchJni : public facebook::jni::HybridClass<PytorchJni> {
     }
     deviceType_ = deviceJniCodeToDeviceType(device);
     module_ = torch::jit::load(
-        std::move(modelPath->toStdString()), c10::nullopt, extra_files);
+        std::move(modelPath->toStdString()), std::nullopt, extra_files);
     if (has_extra) {
       static auto putMethod =
           facebook::jni::JMap<facebook::jni::JString, facebook::jni::JString>::

android/pytorch_android/src/main/cpp/pytorch_jni_lite.cpp (+2 -2)

@@ -84,9 +84,9 @@ class PytorchJni : public facebook::jni::HybridClass<PytorchJni> {
     }
     deviceType_ = deviceJniCodeToDeviceType(device);
     module_ = torch::jit::_load_for_mobile(
-        std::move(modelPath->toStdString()), c10::nullopt, extra_files);
+        std::move(modelPath->toStdString()), std::nullopt, extra_files);
     torch::jit::_load_extra_only_for_mobile(
-        std::move(modelPath->toStdString()), c10::nullopt, extra_files);
+        std::move(modelPath->toStdString()), std::nullopt, extra_files);
     if (has_extra) {
       static auto putMethod =
           facebook::jni::JMap<facebook::jni::JString, facebook::jni::JString>::

aten/src/ATen/native/cuda/MultinomialKernel.cu (+12 -12)

@@ -386,27 +386,27 @@ void multinomial_with_replacement_kernel_impl(
   // for subsequent samples in this space
   Tensor origDist = native::empty_like(
       self_v,
-      c10::nullopt /* dtype */,
-      c10::nullopt /* layout */,
-      c10::nullopt /* device */,
-      c10::nullopt /* pin_memory */,
+      std::nullopt /* dtype */,
+      std::nullopt /* layout */,
+      std::nullopt /* device */,
+      std::nullopt /* pin_memory */,
       LEGACY_CONTIGUOUS_MEMORY_FORMAT);
   origDist.copy_(self_v);

   Tensor normDist = native::empty_like(
       self_v,
-      c10::nullopt /* dtype */,
-      c10::nullopt /* layout */,
-      c10::nullopt /* device */,
-      c10::nullopt /* pin_memory */,
+      std::nullopt /* dtype */,
+      std::nullopt /* layout */,
+      std::nullopt /* device */,
+      std::nullopt /* pin_memory */,
       LEGACY_CONTIGUOUS_MEMORY_FORMAT);

   Tensor prefixSum = native::empty_like(
       self_v,
-      c10::nullopt /* dtype */,
-      c10::nullopt /* layout */,
-      c10::nullopt /* device */,
-      c10::nullopt /* pin_memory */,
+      std::nullopt /* dtype */,
+      std::nullopt /* layout */,
+      std::nullopt /* device */,
+      std::nullopt /* pin_memory */,
       LEGACY_CONTIGUOUS_MEMORY_FORMAT);

   // Renorm along rows

aten/src/ATen/native/cuda/SummaryOps.cu (+6 -6)

@@ -257,9 +257,9 @@ Tensor _bincount_cuda_template(
     return at::zeros(
         {minlength},
         kLong,
-        c10::nullopt /* layout */,
+        std::nullopt /* layout */,
         kCUDA,
-        c10::nullopt /* pin_memory */);
+        std::nullopt /* pin_memory */);
   }
   if (self.dim() != 1 ||
       (!std::is_same<input_t, uint8_t>::value &&
@@ -295,9 +295,9 @@ Tensor _bincount_cuda_template(
     output = at::zeros(
         {nbins},
         kLong,
-        c10::nullopt /* layout */,
+        std::nullopt /* layout */,
         DeviceType::CUDA,
-        c10::nullopt /* pin_memory */);
+        std::nullopt /* pin_memory */);
     cuda::CUDA_tensor_histogram<int64_t, input_t, false>(
         output, self, weights, nbins, minvalue, maxvalue);
   }
@@ -317,9 +317,9 @@ Tensor _histc_cuda_template(
   Tensor output = at::zeros(
       {nbins},
       self.scalar_type(),
-      c10::nullopt /* layout */,
+      std::nullopt /* layout */,
      DeviceType::CUDA,
-      c10::nullopt /* pin_memory */);
+      std::nullopt /* pin_memory */);
   input_t minvalue = min;
   input_t maxvalue = max;
   if (min == max && self.numel() > 0) {

aten/src/ATen/native/cuda/layer_norm_kernel.cu (+24 -24)

@@ -1354,10 +1354,10 @@ std::tuple<Tensor, Tensor, Tensor> layer_norm_cuda(

   Tensor Y = at::native::empty_like(
       *X,
-      c10::nullopt /* dtype */,
-      c10::nullopt /* layout */,
-      c10::nullopt /* device */,
-      c10::nullopt /* pin_memory */,
+      std::nullopt /* dtype */,
+      std::nullopt /* layout */,
+      std::nullopt /* device */,
+      std::nullopt /* pin_memory */,
       LEGACY_CONTIGUOUS_MEMORY_FORMAT);
   auto acc_type = at::toAccumulateType(input.scalar_type(), /*is_cuda=*/true);
   Tensor mean = at::empty({M}, X->options().dtype(acc_type));
@@ -1414,42 +1414,42 @@ std::tuple<Tensor, Tensor, Tensor> layer_norm_backward_cuda(
   if (grad_input_mask[0]) {
     dX = at::native::empty_like(
         *X,
-        c10::nullopt /* dtype */,
-        c10::nullopt /* layout */,
-        c10::nullopt /* device */,
-        c10::nullopt /* pin_memory */,
+        std::nullopt /* dtype */,
+        std::nullopt /* layout */,
+        std::nullopt /* device */,
+        std::nullopt /* pin_memory */,
         LEGACY_CONTIGUOUS_MEMORY_FORMAT);
   }
   if (grad_input_mask[1]) {
     dgamma = M > 0 ? at::native::empty_like(
                          *gamma,
-                         c10::nullopt /* dtype */,
-                         c10::nullopt /* layout */,
-                         c10::nullopt /* device */,
-                         c10::nullopt /* pin_memory */,
+                         std::nullopt /* dtype */,
+                         std::nullopt /* layout */,
+                         std::nullopt /* device */,
+                         std::nullopt /* pin_memory */,
                          LEGACY_CONTIGUOUS_MEMORY_FORMAT)
                    : at::native::zeros_like(
                          *gamma,
-                         c10::nullopt /* dtype */,
-                         c10::nullopt /* layout */,
-                         c10::nullopt /* device */,
-                         c10::nullopt /* pin_memory */,
+                         std::nullopt /* dtype */,
+                         std::nullopt /* layout */,
+                         std::nullopt /* device */,
+                         std::nullopt /* pin_memory */,
                          LEGACY_CONTIGUOUS_MEMORY_FORMAT);
   }
   if (grad_input_mask[2]) {
     dbeta = M > 0 ? at::native::empty_like(
                         *beta,
-                        c10::nullopt /* dtype */,
-                        c10::nullopt /* layout */,
-                        c10::nullopt /* device */,
-                        c10::nullopt /* pin_memory */,
+                        std::nullopt /* dtype */,
+                        std::nullopt /* layout */,
+                        std::nullopt /* device */,
+                        std::nullopt /* pin_memory */,
                         LEGACY_CONTIGUOUS_MEMORY_FORMAT)
                   : at::native::zeros_like(
                         *beta,
-                        c10::nullopt /* dtype */,
-                        c10::nullopt /* layout */,
-                        c10::nullopt /* device */,
-                        c10::nullopt /* pin_memory */,
+                        std::nullopt /* dtype */,
+                        std::nullopt /* layout */,
+                        std::nullopt /* device */,
+                        std::nullopt /* pin_memory */,
                         LEGACY_CONTIGUOUS_MEMORY_FORMAT);
   }
   if (M > 0 && N > 0) {

benchmarks/instruction_counts/definitions/standard.py (+3 -3)

@@ -43,9 +43,9 @@
             at::empty({0}, at::kFloat); // implicit conversion

             // @Faithful overload
-            at::empty({0}, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
-            at::empty({0}, at::kFloat, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
-            at::empty({0}, optional_float, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
+            at::empty({0}, std::nullopt, std::nullopt, std::nullopt, std::nullopt, std::nullopt);
+            at::empty({0}, at::kFloat, std::nullopt, std::nullopt, std::nullopt, std::nullopt);
+            at::empty({0}, optional_float, std::nullopt, std::nullopt, std::nullopt, std::nullopt);
         """
     ),
 },

benchmarks/static_runtime/test_static_runtime.cc (+4 -4)

@@ -1443,7 +1443,7 @@ TEST(StaticRuntime, to) {
   std::vector<IValue> args2{a, other, c, d, e};
   std::vector<IValue> args2WithDifferentOtherType{
       a, at::randn({4, 3, 1, 2}, ScalarType::Double), c, d, e};
-  std::vector<IValue> args3{a, c10::nullopt, c, d};
+  std::vector<IValue> args3{a, std::nullopt, c, d};

   std::vector<IValue> args0WithInt{a, ScalarType::Int, c, d, e};
   testStaticRuntime(
@@ -1478,7 +1478,7 @@ TEST(StaticRuntime, to) {
     testStaticRuntime(to_script_dtype_strided, args0, {a2, b, c, d, e});
     testStaticRuntime(to_script_prim_dtype, args1, {a2, b, c, d});
     if (!d) {
-      testStaticRuntime(to_script_prim_dtype, args3, {a2, c10::nullopt, c, d});
+      testStaticRuntime(to_script_prim_dtype, args3, {a2, std::nullopt, c, d});
     }
     testStaticRuntime(to_script_other, args2, {a2, a2_other, c, d, e});
     testStaticRuntime(to_script_alias, {a}, {a2});
@@ -1758,14 +1758,14 @@ TEST(StaticRuntime, Linear) {
   auto bias = at::randn({1, 1});

   std::vector<IValue> args{input, weights, bias};
-  std::vector<IValue> args_no_bias{input, weights, c10::nullopt};
+  std::vector<IValue> args_no_bias{input, weights, std::nullopt};

   auto input2 = at::randn({6, 3});
   auto weights2 = at::randn({6, 3});
   auto bias2 = at::randn({6, 6});

   std::vector<IValue> args2{input2, weights2, bias2};
-  std::vector<IValue> args2_no_bias{input2, weights2, c10::nullopt};
+  std::vector<IValue> args2_no_bias{input2, weights2, std::nullopt};

   testStaticRuntime(linear_script, args);
   testStaticRuntime(linear_script, args_no_bias);

c10/core/DeviceGuard.h (+8 -8)

@@ -19,7 +19,7 @@ namespace c10 {
 /// This device guard does NOT have an uninitialized state; it is guaranteed
 /// to reset a device on exit. If you are in a situation where you *might*
 /// want to setup a guard (i.e., are looking for the moral equivalent
-/// of optional<DeviceGuard>), see OptionalDeviceGuard.
+/// of std::optional<DeviceGuard>), see OptionalDeviceGuard.
 class DeviceGuard {
  public:
   /// No default constructor; see Note [Omitted default constructor from RAII]
@@ -85,8 +85,8 @@ class DeviceGuard {
 /**
  * A OptionalDeviceGuard is an RAII class that sets a device to some value on
  * initialization, and resets the device to its original value on destruction.
- * Morally, a OptionalDeviceGuard is equivalent to optional<DeviceGuard>, but
- * with extra constructors and methods as appropriate.
+ * Morally, a OptionalDeviceGuard is equivalent to std::optional<DeviceGuard>,
+ * but with extra constructors and methods as appropriate.
  *
  * Besides its obvious use (optionally applying a DeviceGuard),
  * OptionalDeviceGuard is often also used for the following idiom:
@@ -104,12 +104,12 @@ class DeviceGuard {
  * when you use the nullary constructor, or pass a nullopt to the constructor.
  * Uninitialized OptionalDeviceGuards do *nothing*; they do not know what the
  * original device was and they do not reset on destruction. This is why
- * original_device() and current_device() return optional<Device> rather than
- * Device (as they do in DeviceGuard), and also is why we didn't just
+ * original_device() and current_device() return std::optional<Device> rather
+ * than Device (as they do in DeviceGuard), and also is why we didn't just
  * provide OptionalDeviceGuard by default and hide DeviceGuard from users.
  *
  * The semantics of an OptionalDeviceGuard are exactly explained by thinking
- * of it as an optional<DeviceGuard>. In particular, an initialized
+ * of it as an std::optional<DeviceGuard>. In particular, an initialized
  * OptionalDeviceGuard doesn't restore device to its value at construction; it
  * restores device to its value *at initialization*. So if you have the
  * program:
@@ -168,13 +168,13 @@ class OptionalDeviceGuard {
   }

   /// Returns the device that was set at the time the guard was constructed.
-  optional<Device> original_device() const {
+  std::optional<Device> original_device() const {
     return guard_.original_device();
   }

   /// Returns the most recent device that was set using this device guard,
   /// either from construction, or via reset_device.
-  optional<Device> current_device() const {
+  std::optional<Device> current_device() const {
     return guard_.current_device();
   }

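The comments in this hunk describe the core idiom: an OptionalDeviceGuard starts uninitialized (doing nothing) and only pins/restores a device once it is given one, via the nullary constructor and reset_device the comments mention. A hedged usage sketch, assuming a PyTorch build where <c10/core/DeviceGuard.h> is on the include path (run_on is a made-up function name):

#include <c10/core/DeviceGuard.h>
#include <optional>

// Engage the guard only when a device is actually provided -- the
// "moral equivalent of std::optional<DeviceGuard>" the comments describe.
void run_on(std::optional<c10::Device> maybe_device) {
  c10::OptionalDeviceGuard guard; // uninitialized: restores nothing on exit
  if (maybe_device.has_value()) {
    guard.reset_device(*maybe_device); // initialized: restores on destruction
  }
  // ... work on the (possibly switched) device ...
}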

c10/core/DispatchKey.h (+2 -2)

@@ -139,10 +139,10 @@ enum class DispatchKey : uint16_t {
   // element we can return for cases when a DispatchKeySet contains no elements.
   // You can think a more semantically accurate definition of DispatchKey is:
   //
-  //   using DispatchKey = optional<RealDispatchKey>
+  //   using DispatchKey = std::optional<RealDispatchKey>
   //
   // and Undefined == nullopt. We didn't actually represent
-  // it this way because optional<RealDispatchKey> would take two
+  // it this way because std::optional<RealDispatchKey> would take two
   // words, when DispatchKey fits in eight bits.

   Undefined = 0,
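The comment in that hunk justifies not representing DispatchKey as std::optional<RealDispatchKey>: the optional wrapper adds an engaged flag (plus padding), while the bare enum stays at its underlying-type size. A standalone sketch of that size argument (Key is a toy enum, not the real DispatchKey):

#include <cstdint>
#include <optional>

enum class Key : uint8_t { Undefined = 0, CPU, CUDA };

static_assert(sizeof(Key) == 1, "bare enum is one byte");
// std::optional<Key> stores an extra engaged flag plus padding,
// so it is strictly larger than the enum itself.
static_assert(sizeof(std::optional<Key>) > sizeof(Key), "optional costs more");

int main() { return 0; }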

c10/core/DispatchKeySet.h (+1 -1)

@@ -819,7 +819,7 @@ C10_API DispatchKeySet getBackendKeySetFromAutograd(DispatchKey t);
 // for a given backend key, use the associated autograd key.
 // for non-backend keys, use AutogradOther as a default.
 // Note: it's convenient and fast to return a default here rather than (say)
-// returning an optional<DispatchKey>, or throwing. But it makes callers
+// returning an std::optional<DispatchKey>, or throwing. But it makes callers
 // responsible for either a) enforcing the invariant that only backend keys
 // be passed as arguments, or b) interpreting our return value carefully.
 inline DispatchKeySet getAutogradRelatedKeySetFromBackend(BackendComponent t) {

c10/core/ScalarTypeToTypeMeta.h (+2 -2)

@@ -27,8 +27,8 @@ inline ScalarType typeMetaToScalarType(caffe2::TypeMeta dtype) {
 /**
  * typeMetaToScalarType(), lifted to optional
  */
-inline optional<at::ScalarType> optTypeMetaToScalarType(
-    optional<caffe2::TypeMeta> type_meta) {
+inline std::optional<at::ScalarType> optTypeMetaToScalarType(
+    std::optional<caffe2::TypeMeta> type_meta) {
   if (!type_meta.has_value()) {
     return std::nullopt;
   }
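"Lifted to optional" in that doc comment means mapping a plain function over std::optional inputs, propagating nullopt when the input is empty, exactly as the hunk's has_value() check does. A generic sketch of the same pattern (lift_optional is a hypothetical helper, not part of c10):

#include <optional>
#include <utility>

// Hypothetical helper: lift f : A -> B to std::optional<A> -> std::optional<B>.
template <typename F, typename A>
auto lift_optional(F f, std::optional<A> a)
    -> std::optional<decltype(f(std::move(*a)))> {
  if (!a.has_value()) {
    return std::nullopt; // empty in, empty out
  }
  return f(std::move(*a));
}

int main() {
  auto square = [](int x) { return x * x; };
  auto some = lift_optional(square, std::optional<int>(3)); // contains 9
  auto none = lift_optional(square, std::optional<int>());  // nullopt
  return (some.value_or(0) == 9 && !none.has_value()) ? 0 : 1;
}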

c10/core/StreamGuard.h (+2 -2)

@@ -123,14 +123,14 @@ struct OptionalStreamGuard {

   /// Returns the stream that was set at the time the guard was most recently
   /// initialized, or nullopt if the guard is uninitialized.
-  optional<Stream> original_stream() const {
+  std::optional<Stream> original_stream() const {
     return guard_.original_stream();
   }

   /// Returns the most recent stream that was set using this stream guard,
   /// either from construction, or via reset_stream, if the guard is
   /// initialized, or nullopt if the guard is uninitialized.
-  optional<Stream> current_stream() const {
+  std::optional<Stream> current_stream() const {
     return guard_.current_stream();
   }


c10/core/TensorOptions.h (+3 -3)

@@ -132,9 +132,9 @@ inline bool pinned_memory_or_default(std::optional<bool> pinned_memory) {
 /// resolution is done before template resolution, our problem is solved.

 DispatchKey computeDispatchKey(
-    optional<ScalarType> dtype,
-    optional<Layout> layout,
-    optional<Device> device);
+    std::optional<ScalarType> dtype,
+    std::optional<Layout> layout,
+    std::optional<Device> device);

 struct C10_API TensorOptions {
   TensorOptions()

c10/core/impl/InlineDeviceGuard.h (+5 -5)

@@ -403,17 +403,17 @@ class InlineOptionalDeviceGuard {

   /// Returns the device that was set immediately prior to initialization of
   /// the, guard, or nullopt if the guard is uninitialized.
-  optional<Device> original_device() const {
+  std::optional<Device> original_device() const {
     return guard_.has_value() ? std::make_optional(guard_->original_device())
-                              : nullopt;
+                              : std::nullopt;
   }

   /// Returns the most recent device that was set using this device guard,
   /// either from construction, or via set_device, if the guard is initialized,
   /// or nullopt if the guard is uninitialized.
-  optional<Device> current_device() const {
+  std::optional<Device> current_device() const {
     return guard_.has_value() ? std::make_optional(guard_->current_device())
-                              : nullopt;
+                              : std::nullopt;
   }

   /// Restore the original device, resetting this guard to uninitialized state.
@@ -422,7 +422,7 @@ class InlineOptionalDeviceGuard {
   }

  private:
-  optional<InlineDeviceGuard<T>> guard_;
+  std::optional<InlineDeviceGuard<T>> guard_;
 };

 } // namespace c10::impl
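One subtlety in the hunk above: the conditional expression mixes std::make_optional(...) (type std::optional<Device>) with std::nullopt (type std::nullopt_t), and it compiles because std::nullopt_t converts implicitly to any std::optional<T>. A standalone sketch of that pattern (Device here is a stand-in struct, not c10::Device):

#include <optional>

struct Device { int index; }; // stand-in for c10::Device

std::optional<Device> maybe_device(bool engaged) {
  // std::nullopt_t converts to std::optional<Device>, so both
  // branches of the conditional agree on the result type.
  return engaged ? std::make_optional(Device{0}) : std::nullopt;
}

int main() {
  return (maybe_device(true).has_value() && !maybe_device(false).has_value()) ? 0 : 1;
}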
