@@ -33,7 +33,7 @@ struct DummyDataset : datasets::Dataset<DummyDataset, int> {
     // NOLINTNEXTLINE(cppcoreguidelines-narrowing-conversions,bugprone-narrowing-conversions)
     return 1 + index;
   }
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return size_;
   }
 
@@ -151,8 +151,8 @@ struct InfiniteStreamDataset
     return batch;
   }
 
-  torch::optional<size_t> size() const override {
-    return torch::nullopt;
+  std::optional<size_t> size() const override {
+    return std::nullopt;
   }
 
   size_t counter = 0;
@@ -459,7 +459,7 @@ TEST(DataTest, StackTransformWorksForExample) {
       return {tensor[index], 1 + tensor[index]};
     }
 
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
      return tensor.size(0);
     }
 
@@ -503,7 +503,7 @@ struct TensorStringDataset
     return {torch::tensor(static_cast<double>(index)), std::to_string(index)};
   }
 
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return 100;
   }
 };
@@ -542,7 +542,7 @@ struct DummyTensorDataset
     return {tensor, static_cast<int>(channels)};
   }
 
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return 100;
   }
 };
@@ -624,7 +624,7 @@ struct UnCopyableDataset : public datasets::Dataset<UnCopyableDataset> {
         torch::tensor({static_cast<int64_t>(index)})};
   }
 
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return 100;
   }
 };
@@ -753,7 +753,7 @@ struct UncopyableDataset : datasets::Dataset<UncopyableDataset, int> {
     // NOLINTNEXTLINE(cppcoreguidelines-narrowing-conversions,bugprone-narrowing-conversions)
     return 1 + index;
   }
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return 100;
   }
 };
@@ -806,18 +806,18 @@ struct TestIndexDataset
     }
     return batch;
   }
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return data.size();
   }
   std::vector<int> data;
 };
 
 struct TestIndexSampler : public samplers::Sampler<TestIndex> {
   explicit TestIndexSampler(size_t size) : size_(size) {}
-  void reset(torch::optional<size_t> new_size = torch::nullopt) override {}
-  torch::optional<TestIndex> next(size_t batch_size) override {
+  void reset(std::optional<size_t> new_size = std::nullopt) override {}
+  std::optional<TestIndex> next(size_t batch_size) override {
     if (index_ >= size_) {
-      return torch::nullopt;
+      return std::nullopt;
     }
     std::vector<size_t> indices(batch_size);
     std::iota(indices.begin(), indices.end(), size_t(0));
@@ -847,7 +847,7 @@ TEST(DataTest, DistributedRandomSamplerSingleReplicaProduceCorrectSamples) {
   samplers::DistributedRandomSampler drs(sample_count);
 
   std::vector<size_t> res;
-  torch::optional<std::vector<size_t>> idx;
+  std::optional<std::vector<size_t>> idx;
   while ((idx = drs.next(3)).has_value()) {
     res.insert(std::end(res), std::begin(*idx), std::end(*idx));
   }
@@ -879,7 +879,7 @@ TEST(DataTest, DistributedRandomSamplerMultiReplicaProduceCorrectSamples) {
   std::vector<size_t> res;
   for (const auto i : c10::irange(num_replicas)) {
     (*samplers[i]).reset();
-    torch::optional<std::vector<size_t>> idx;
+    std::optional<std::vector<size_t>> idx;
     while ((idx = (*samplers[i]).next(batch_size)).has_value()) {
       res.insert(std::end(res), std::begin(*idx), std::end(*idx));
     }
@@ -943,7 +943,7 @@ TEST(DataTest, DistributedSequentialSamplerSingleReplicaProduceCorrectSamples) {
   samplers::DistributedSequentialSampler dss(sample_count);
 
   std::vector<size_t> res;
-  torch::optional<std::vector<size_t>> idx;
+  std::optional<std::vector<size_t>> idx;
   while ((idx = dss.next(batch_size)).has_value()) {
     res.insert(std::end(res), std::begin(*idx), std::end(*idx));
   }
@@ -976,7 +976,7 @@ TEST(DataTest, DistributedSequentialSamplerMultiReplicaProduceCorrectSamples) {
   std::vector<size_t> res;
   for (const auto i : c10::irange(num_replicas)) {
     (*samplers[i]).reset();
-    torch::optional<std::vector<size_t>> idx;
+    std::optional<std::vector<size_t>> idx;
     while ((idx = (*samplers[i]).next(batch_size)).has_value()) {
       res.insert(std::end(res), std::begin(*idx), std::end(*idx));
     }
@@ -1052,8 +1052,8 @@ struct UnsizedDataset : public datasets::Dataset<UnsizedDataset> {
   torch::data::Example<> get(size_t i) override {
     return {torch::ones(i), torch::ones(i)};
   }
-  torch::optional<size_t> size() const noexcept override {
-    return torch::nullopt;
+  std::optional<size_t> size() const noexcept override {
+    return std::nullopt;
   }
 };
 
@@ -1150,7 +1150,7 @@ TEST(DataLoaderTest, CanUseIteratorAlgorithms) {
       // NOLINTNEXTLINE(cppcoreguidelines-narrowing-conversions,bugprone-narrowing-conversions)
       return 1 + indices.front();
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 10;
     }
   };
@@ -1270,7 +1270,7 @@ TEST(DataLoaderTest, RespectsTimeout) {
       baton->cv.wait_for(lock, 1000 * kMillisecond);
       return 0;
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
      return 100;
     }
     std::shared_ptr<Baton> baton;
@@ -1388,7 +1388,7 @@ struct Dataset : datasets::BatchDataset<Dataset, size_t> {
     return indices.front();
   }
 
-  torch::optional<size_t> size() const override {
+  std::optional<size_t> size() const override {
     return kNumberOfWorkers;
   }
 
@@ -1441,7 +1441,7 @@ TEST(DataLoaderTest, TestExceptionsArePropagatedFromWorkers) {
     int get(size_t index) override {
       throw std::invalid_argument("badness");
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 100;
     }
   };
@@ -1467,13 +1467,13 @@ TEST(DataLoaderTest, StatefulDatasetWithNoWorkers) {
   const int kNumberOfExamplesAfterWhichTheDatasetExhausts = 10;
 
   struct D : datasets::StatefulDataset<D, int, size_t> {
-    torch::optional<int> get_batch(size_t) override {
+    std::optional<int> get_batch(size_t) override {
       if (counter < kNumberOfExamplesAfterWhichTheDatasetExhausts) {
         return counter++;
       }
-      return torch::nullopt;
+      return std::nullopt;
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 100;
     }
     void reset() override {
@@ -1504,14 +1504,14 @@ TEST(DataLoaderTest, StatefulDatasetWithManyWorkers) {
   const int kNumberOfWorkers = 4;
 
   struct D : datasets::StatefulDataset<D, int, size_t> {
-    torch::optional<int> get_batch(size_t) override {
+    std::optional<int> get_batch(size_t) override {
       std::lock_guard<std::mutex> lock(mutex);
       if (counter < kNumberOfExamplesAfterWhichTheDatasetExhausts) {
         return counter++;
       }
-      return torch::nullopt;
+      return std::nullopt;
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 100;
     }
     void reset() override {
@@ -1544,13 +1544,13 @@ TEST(DataLoaderTest, StatefulDatasetWithMap) {
   const int kNumberOfExamplesAfterWhichTheDatasetExhausts = 10;
 
   struct D : datasets::StatefulDataset<D, int, size_t> {
-    torch::optional<int> get_batch(size_t) override {
+    std::optional<int> get_batch(size_t) override {
       if (counter < kNumberOfExamplesAfterWhichTheDatasetExhausts) {
         return counter++;
       }
-      return torch::nullopt;
+      return std::nullopt;
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 100;
     }
     void reset() override {
@@ -1587,7 +1587,7 @@ TEST(DataLoaderTest, StatefulDatasetWithCollate) {
   const int kNumberOfExamplesAfterWhichTheDatasetExhausts = 10;
 
   struct D : datasets::StatefulDataset<D> {
-    torch::optional<std::vector<Example<>>> get_batch(
+    std::optional<std::vector<Example<>>> get_batch(
         size_t batch_size) override {
       if (counter < kNumberOfExamplesAfterWhichTheDatasetExhausts) {
         counter += batch_size;
@@ -1597,9 +1597,9 @@ TEST(DataLoaderTest, StatefulDatasetWithCollate) {
                 torch::ones(batch_size + 1), torch::zeros(batch_size - 1)});
         return batch;
       }
-      return torch::nullopt;
+      return std::nullopt;
     }
-    torch::optional<size_t> size() const override {
+    std::optional<size_t> size() const override {
       return 100;
     }
     void reset() override {
@@ -1616,7 +1616,7 @@ TEST(DataLoaderTest, StatefulDatasetWithCollate) {
 
   // Notice that the `get_batch()` of the dataset returns a vector<Example>, but
   // the `Stack` collation stacks the tensors into one.
-  torch::optional<Example<>> batch = d.get_batch(kBatchSize);
+  std::optional<Example<>> batch = d.get_batch(kBatchSize);
   ASSERT_TRUE(batch.has_value());
   ASSERT_EQ(batch->data.size(0), kBatchSize);
   ASSERT_EQ(batch->data.size(1), kBatchSize + 1);
@@ -2117,7 +2117,7 @@ TEST(DataLoaderTest, ChunkDatasetCrossChunkShuffle) {
    public:
     explicit S(size_t size) : size_(size), index_(0){};
 
-    void reset(torch::optional<size_t> new_size = torch::nullopt) override {
+    void reset(std::optional<size_t> new_size = std::nullopt) override {
       if (new_size.has_value()) {
         size_ = *new_size;
       }
@@ -2134,10 +2134,10 @@ TEST(DataLoaderTest, ChunkDatasetCrossChunkShuffle) {
     }
 
     // Returns the next batch of indices.
-    torch::optional<std::vector<size_t>> next(size_t batch_size) override {
+    std::optional<std::vector<size_t>> next(size_t batch_size) override {
      const auto remaining_indices = size_ - index_;
      if (remaining_indices == 0) {
-        return torch::nullopt;
+        return std::nullopt;
      }
      auto return_size = std::min(batch_size, remaining_indices);
      std::vector<size_t> index_batch(