//===- InteractiveModelRunner.cpp - Interactive ML model runner -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// A runner that communicates with an external agent via 2 file descriptors.
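//
// Per decision, the compiler logs one observation (the current input tensors)
// to the outbound channel and flushes it, then blocks reading exactly
// OutputSpec.getTotalTensorBufferSize() bytes of advice from the inbound
// channel (see evaluateUntyped below).
//
// Minimal usage sketch; the feature/advice specs, values, and pipe paths are
// illustrative only:
//
//   std::vector<TensorSpec> Features{
//       TensorSpec::createSpec<int64_t>("some_feature", {1})};
//   TensorSpec Advice = TensorSpec::createSpec<int64_t>("advice", {1});
//   InteractiveModelRunner Runner(Ctx, Features, Advice, "/tmp/out.pipe",
//                                 "/tmp/in.pipe");
//   *Runner.getTensor<int64_t>(0) = 42;            // fill the features
//   int64_t Decision = Runner.evaluate<int64_t>(); // blocks on the agent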
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/InteractiveModelRunner.h"
#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Analysis/TensorSpec.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

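// SUPPORTED_TENSOR_TYPES expands _IMR_CL_VALS into one clEnumValN entry per
// supported tensor element type, so the option below accepts any of those
// type names.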
#define _IMR_CL_VALS(T, N) clEnumValN(TensorType::N, #T, #T),

static cl::opt<TensorType> DebugReply(
    "interactive-model-runner-echo-type", cl::init(TensorType::Invalid),
    cl::Hidden,
    cl::desc("The InteractiveModelRunner will echo back to stderr "
             "the data received from the host as the specified type "
             "(for debugging purposes)."),
    cl::values(SUPPORTED_TENSOR_TYPES(_IMR_CL_VALS)
                   clEnumValN(TensorType::Invalid, "disable", "Don't echo")));

#undef _IMR_CL_VALS

InteractiveModelRunner::InteractiveModelRunner(
    LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs,
    const TensorSpec &Advice, StringRef OutboundName, StringRef InboundName)
    : MLModelRunner(Ctx, MLModelRunner::Kind::Interactive, Inputs.size()),
      InputSpecs(Inputs), OutputSpec(Advice),
      InEC(sys::fs::openFileForRead(InboundName, Inbound)),
      OutputBuffer(OutputSpec.getTotalTensorBufferSize()) {
  if (InEC) {
    Ctx.emitError("Cannot open inbound file: " + InEC.message());
    return;
  }
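  // Constructing the Logger writes a header describing the tensor specs to
  // the outbound channel; the external agent is expected to read that header
  // before the first observation arrives.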
  {
    auto OutStream = std::make_unique<raw_fd_ostream>(OutboundName, OutEC);
    if (OutEC) {
      Ctx.emitError("Cannot open outbound file: " + OutEC.message());
      return;
    }
    Log = std::make_unique<Logger>(std::move(OutStream), InputSpecs, Advice,
                                   /*IncludeReward=*/false, Advice);
  }
  // Just like in the no inference case, this will allocate an appropriately
  // sized buffer.
  for (size_t I = 0; I < InputSpecs.size(); ++I)
    setUpBufferForTensor(I, InputSpecs[I], nullptr);
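  // Flush so the header reaches the agent before the first evaluation.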
  Log->flush();
}

InteractiveModelRunner::~InteractiveModelRunner() {
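  // Only the inbound file is owned directly; the outbound stream is owned and
  // closed by the Logger.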
  sys::fs::closeFile(Inbound);
}

void *InteractiveModelRunner::evaluateUntyped() {
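  // Ship the current feature tensors to the agent as a single observation.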
  Log->startObservation();
  for (size_t I = 0; I < InputSpecs.size(); ++I)
    Log->logTensorValue(I, reinterpret_cast<const char *>(getTensorUntyped(I)));
  Log->endObservation();
  Log->flush();

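  // Block until the full advice tensor has been received; the agent is
  // expected to reply with exactly OutputSpec.getTotalTensorBufferSize() raw
  // bytes.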
73   char *Buff = OutputBuffer.data();
74   const size_t Limit = OutputBuffer.size();
75   while (InsPoint < Limit) {
76     auto ReadOrErr = ::sys::fs::readNativeFile(
77         Inbound, {Buff + InsPoint, OutputBuffer.size() - InsPoint});
    // Take and consume the error so it is not destroyed unchecked, and
    // include its message in the diagnostic.
    if (Error E = ReadOrErr.takeError()) {
      Ctx.emitError("Failed reading from inbound file: " +
                    toString(std::move(E)));
      break;
    }
    InsPoint += *ReadOrErr;
  }
  if (DebugReply != TensorType::Invalid)
    dbgs() << tensorValueToString(OutputBuffer.data(), OutputSpec);
  return OutputBuffer.data();
}