entrypoint.cpp

#include "entrypoint.hpp"  // expected to pull in the TensorFlow, nlohmann::json and ConnectionManager headers

namespace tf_lib {

using namespace tensorflow;
using namespace tensorflow::shape_inference;

// Register the "MyDummy" op: one int32 input, one int32 output, and a shape
// function that simply forwards the input shape to the output.
REGISTER_OP("MyDummy")
    .Input("input: int32")
    .Output("output: int32")
    .SetShapeFn([](InferenceContext* c) {
      c->set_output(0, c->input(0));
      return Status::OK();
    });

REGISTER_KERNEL_BUILDER(Name("MyDummy").Device(DEVICE_CPU), DummyOp);

// Register the "MyDummyBig" op with the same signature and shape function.
REGISTER_OP("MyDummyBig")
    .Input("input: int32")
    .Output("output: int32")
    .SetShapeFn([](InferenceContext* c) {
      c->set_output(0, c->input(0));
      return Status::OK();
    });

REGISTER_KERNEL_BUILDER(Name("MyDummyBig").Device(DEVICE_CPU), DummyBigOp);
ConnectionManager connectionManager;
bool hasInitialized = false;

// Read config.json, register every listed FPGA with the connection manager,
// then start it. Subsequent calls are no-ops.
void init() {
  if (hasInitialized)
    return;

  std::ifstream configStream("config.json");
  nlohmann::json config;
  configStream >> config;

  auto fpgas = config["fpgas"];
  for (unsigned int i = 0; i < fpgas.size(); i++) {
    std::string ip = fpgas[i]["ip"];
    const unsigned int port = fpgas[i]["port"];
    connectionManager.addFPGA(ip.c_str(), port);
    printf("added fpga %u at %s:%u\n", i, ip.c_str(), port);
  }

  connectionManager.start();
  printf("fpga server started\n");
  hasInitialized = true;
}

// Runs when the shared library is loaded, e.g. via dlopen or tf.load_op_library.
void __attribute__((constructor)) construct(void) {
  printf("fpga library loaded\n");
}

}  // namespace tf_lib
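
init() expects a config.json in the working directory that lists the reachable FPGAs. A minimal sketch matching the keys read above (the addresses and ports here are placeholders, not values from the project):

{
  "fpgas": [
    { "ip": "10.0.0.1", "port": 5000 },
    { "ip": "10.0.0.2", "port": 5000 }
  ]
}

Once compiled into a shared object, the two registered ops are exposed on the Python side through tf.load_op_library, under the snake_case names my_dummy and my_dummy_big.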