10 changes: 2 additions & 8 deletions test/cpp/torch_xla_test.cpp
@@ -8,19 +8,13 @@
 #include "torch_xla/csrc/device.h"
 #include "torch_xla/csrc/helpers.h"
 #include "torch_xla/csrc/tensor.h"
-
-namespace at {
-// This function is defined in the codegenerated RegisterDispatchKey.cpp file.
-extern TORCH_API void RegisterXLAXLANativeFunctions();
-extern TORCH_API void RegisterXLAAutogradXLANativeFunctions();
-}  // namespace at
+#include "torch_xla/csrc/xla_backend_impl.h"
 
 namespace torch_xla {
 namespace cpp_test {
 
 void XlaTest::SetUp() {
-  at::RegisterXLAXLANativeFunctions();
-  at::RegisterXLAAutogradXLANativeFunctions();
+  InitXlaBackend();
   at::manual_seed(42);
   XLATensor::SetRngSeed(GetCurrentDevice(), 42);
   start_msnap_ = absl::make_unique<MetricsSnapshot>();
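Note on the test change above: the single InitXlaBackend() call replaces the per-test registration of the codegenerated XLA dispatch entries. As a rough illustration only, a consolidated init entry point of this kind is usually guarded so that repeated calls from every test's SetUp() register the backend just once. The sketch below is an assumption about the shape of such a helper, with an invented name; it is not the actual body of InitXlaBackend():

// Hypothetical sketch of a run-once backend init helper; the once-guard and
// the commented registration calls are assumptions, not the real
// InitXlaBackend() implementation.
#include <mutex>

namespace torch_xla {

void InitXlaBackendSketch() {
  static std::once_flag once;
  std::call_once(once, []() {
    // Register the ATen dispatch entries and the lazy backend exactly once,
    // no matter how many tests call this from SetUp(), e.g.:
    //   at::RegisterXLAXLANativeFunctions();
    //   at::RegisterXLAAutogradXLANativeFunctions();
    //   torch::lazy::BackendRegistrar registrar(GetXlaBackendImpl());
  });
}

}  // namespace torch_xla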
58 changes: 49 additions & 9 deletions torch_xla/csrc/xla_backend_impl.cpp
@@ -21,6 +21,30 @@ namespace torch_xla {
 class XlaBackendImpl : public torch::lazy::BackendImplInterface {
  public:
   XlaBackendImpl() {}
+
+  bool InitDefaultDeviceType() {
+    if (!default_device_type_inited_) {
+      // GetDefaultDevice will trigger the runtime device init, should
+      // not do it during class init time.
+      torch::lazy::BackendDevice default_device = *GetDefaultDevice();
+      default_device_type_ = std::make_shared<DeviceType>(
+          static_cast<XlaDeviceType>(default_device.type()));
+      default_device_type_inited_ = true;
+    }
+    return true;
+  }
+
+  bool InitDefaultDeviceOrdinal() {
+    if (!default_device_ordinal_inited_) {
+      // GetDefaultDevice will trigger the runtime device init, should
+      // not do it during class init time.
+      torch::lazy::BackendDevice default_device = *GetDefaultDevice();
+      default_device_ordinal_ = default_device.ordinal();
+      default_device_ordinal_inited_ = true;
+    }
+    return true;
+  }
+
   void PrepareToExit() const override { XLA_ERROR() << "Not implemented yet"; }
 
   void SetRngSeed(size_t seed) const override {
@@ -149,18 +173,31 @@ class XlaBackendImpl : public torch::lazy::BackendImplInterface {
 
   std::shared_ptr<torch::lazy::BackendDeviceType> GetDefaultDeviceType()
       const override {
-    // want to reuse the getDefualtDeviceTypelogic
-    torch::lazy::BackendDevice default_device = *GetDefaultDevice();
-    return std::make_shared<DeviceType>(
-        static_cast<XlaDeviceType>(default_device.type()));
+    // lazily init default device type, we only need to init once.
+    static bool init =
+        const_cast<XlaBackendImpl*>(this)->InitDefaultDeviceType();
+    return default_device_type_;
   }
 
-  at::DeviceType EagerFallbackDeviceType() const override {
-    return at::DeviceType::CPU;
+  void SetDefaultDeviceType(int8_t type) override {
+    default_device_type_ =
+        std::make_shared<DeviceType>(static_cast<XlaDeviceType>(type));
+    default_device_type_inited_ = true;
   }
 
-  void SetDefaultDeviceType(std::string type) override {
-    default_device_type_ = XlaDeviceType(c10::Device(type).type());
+  int64_t GetDefaultDeviceOrdinal() const override {
+    // lazily init default device ordinal, we only need to init once.
+    static bool init =
+        const_cast<XlaBackendImpl*>(this)->InitDefaultDeviceOrdinal();
+    return default_device_ordinal_;
   }
+  void SetDefaultDeviceOrdinal(int64_t ordinal) override {
+    default_device_ordinal_ = ordinal;
+    default_device_ordinal_inited_ = true;
+  }
+
+  at::DeviceType EagerFallbackDeviceType() const override {
+    return at::DeviceType::CPU;
+  }
 
   std::vector<torch::lazy::BackendDevice> GetBackendDevices() const override {
@@ -179,7 +216,10 @@ class XlaBackendImpl : public torch::lazy::BackendImplInterface {
   }
 
  private:
-  DeviceType default_device_type_;
+  bool default_device_type_inited_ = false;
+  bool default_device_ordinal_inited_ = false;
+  std::shared_ptr<torch::lazy::BackendDeviceType> default_device_type_;
+  int64_t default_device_ordinal_;
 };
 
 torch::lazy::BackendImplInterface* GetXlaBackendImpl() {
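For context on the getter changes above: GetDefaultDeviceType() and GetDefaultDeviceOrdinal() now defer the runtime device query to first use through a function-local static, which C++11 guarantees is initialized exactly once and thread-safely. The static is per program rather than per instance, which is harmless as long as a single backend object is handed out, as GetXlaBackendImpl() suggests. Below is a minimal standalone sketch of that idiom; all names are invented for illustration and are not torch_xla APIs:

// Minimal sketch of the lazy-init idiom: the function-local static runs the
// (possibly expensive) initializer once, on first call, never at construction.
// All names below are invented for illustration.
#include <iostream>
#include <memory>
#include <string>

class LazyDefaults {
 public:
  const std::string& default_device_type() const {
    // Initialized exactly once, thread-safely, on first access (C++11).
    static bool init =
        const_cast<LazyDefaults*>(this)->InitDefaultDeviceType();
    (void)init;
    return *default_device_type_;
  }

  void set_default_device_type(std::string type) {
    default_device_type_ = std::make_shared<std::string>(std::move(type));
    inited_ = true;
  }

 private:
  bool InitDefaultDeviceType() {
    if (!inited_) {
      // Stand-in for the runtime query (GetDefaultDevice() in the real code).
      default_device_type_ = std::make_shared<std::string>("CPU");
      inited_ = true;
    }
    return true;
  }

  bool inited_ = false;
  std::shared_ptr<std::string> default_device_type_;
};

int main() {
  LazyDefaults defaults;
  defaults.set_default_device_type("TPU");  // explicit set wins over lazy init
  std::cout << defaults.default_device_type() << std::endl;  // prints TPU
}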