//===- InteractiveModelRunner.cpp - noop ML model runner ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// A runner that communicates with an external agent via 2 file descriptors.
//===----------------------------------------------------------------------===//
117fa27ce4SDimitry Andric #include "llvm/Analysis/InteractiveModelRunner.h"
127fa27ce4SDimitry Andric #include "llvm/Analysis/MLModelRunner.h"
137fa27ce4SDimitry Andric #include "llvm/Analysis/TensorSpec.h"
147fa27ce4SDimitry Andric #include "llvm/Support/CommandLine.h"
157fa27ce4SDimitry Andric #include "llvm/Support/ErrorHandling.h"
167fa27ce4SDimitry Andric #include "llvm/Support/FileSystem.h"
177fa27ce4SDimitry Andric #include "llvm/Support/raw_ostream.h"
187fa27ce4SDimitry Andric
197fa27ce4SDimitry Andric using namespace llvm;
207fa27ce4SDimitry Andric
217fa27ce4SDimitry Andric static cl::opt<bool> DebugReply(
227fa27ce4SDimitry Andric "interactive-model-runner-echo-reply", cl::init(false), cl::Hidden,
237fa27ce4SDimitry Andric cl::desc("The InteractiveModelRunner will echo back to stderr "
247fa27ce4SDimitry Andric "the data received from the host (for debugging purposes)."));
257fa27ce4SDimitry Andric
InteractiveModelRunner(LLVMContext & Ctx,const std::vector<TensorSpec> & Inputs,const TensorSpec & Advice,StringRef OutboundName,StringRef InboundName)267fa27ce4SDimitry Andric InteractiveModelRunner::InteractiveModelRunner(
277fa27ce4SDimitry Andric LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs,
287fa27ce4SDimitry Andric const TensorSpec &Advice, StringRef OutboundName, StringRef InboundName)
297fa27ce4SDimitry Andric : MLModelRunner(Ctx, MLModelRunner::Kind::Interactive, Inputs.size()),
307fa27ce4SDimitry Andric InputSpecs(Inputs), OutputSpec(Advice),
317fa27ce4SDimitry Andric InEC(sys::fs::openFileForRead(InboundName, Inbound)),
327fa27ce4SDimitry Andric OutputBuffer(OutputSpec.getTotalTensorBufferSize()) {
337fa27ce4SDimitry Andric if (InEC) {
347fa27ce4SDimitry Andric Ctx.emitError("Cannot open inbound file: " + InEC.message());
357fa27ce4SDimitry Andric return;
367fa27ce4SDimitry Andric }
377fa27ce4SDimitry Andric {
387fa27ce4SDimitry Andric auto OutStream = std::make_unique<raw_fd_ostream>(OutboundName, OutEC);
397fa27ce4SDimitry Andric if (OutEC) {
407fa27ce4SDimitry Andric Ctx.emitError("Cannot open outbound file: " + OutEC.message());
417fa27ce4SDimitry Andric return;
427fa27ce4SDimitry Andric }
437fa27ce4SDimitry Andric Log = std::make_unique<Logger>(std::move(OutStream), InputSpecs, Advice,
447fa27ce4SDimitry Andric /*IncludeReward=*/false, Advice);
457fa27ce4SDimitry Andric }
467fa27ce4SDimitry Andric // Just like in the no inference case, this will allocate an appropriately
477fa27ce4SDimitry Andric // sized buffer.
487fa27ce4SDimitry Andric for (size_t I = 0; I < InputSpecs.size(); ++I)
497fa27ce4SDimitry Andric setUpBufferForTensor(I, InputSpecs[I], nullptr);
507fa27ce4SDimitry Andric Log->flush();
517fa27ce4SDimitry Andric }
527fa27ce4SDimitry Andric
~InteractiveModelRunner()537fa27ce4SDimitry Andric InteractiveModelRunner::~InteractiveModelRunner() {
547fa27ce4SDimitry Andric sys::fs::file_t FDAsOSHandle = sys::fs::convertFDToNativeFile(Inbound);
557fa27ce4SDimitry Andric sys::fs::closeFile(FDAsOSHandle);
567fa27ce4SDimitry Andric }
577fa27ce4SDimitry Andric
evaluateUntyped()587fa27ce4SDimitry Andric void *InteractiveModelRunner::evaluateUntyped() {
597fa27ce4SDimitry Andric Log->startObservation();
607fa27ce4SDimitry Andric for (size_t I = 0; I < InputSpecs.size(); ++I)
617fa27ce4SDimitry Andric Log->logTensorValue(I, reinterpret_cast<const char *>(getTensorUntyped(I)));
627fa27ce4SDimitry Andric Log->endObservation();
637fa27ce4SDimitry Andric Log->flush();
647fa27ce4SDimitry Andric
657fa27ce4SDimitry Andric size_t InsPoint = 0;
667fa27ce4SDimitry Andric char *Buff = OutputBuffer.data();
677fa27ce4SDimitry Andric const size_t Limit = OutputBuffer.size();
687fa27ce4SDimitry Andric while (InsPoint < Limit) {
697fa27ce4SDimitry Andric auto ReadOrErr = ::sys::fs::readNativeFile(
707fa27ce4SDimitry Andric sys::fs::convertFDToNativeFile(Inbound),
717fa27ce4SDimitry Andric {Buff + InsPoint, OutputBuffer.size() - InsPoint});
727fa27ce4SDimitry Andric if (ReadOrErr.takeError()) {
737fa27ce4SDimitry Andric Ctx.emitError("Failed reading from inbound file");
747fa27ce4SDimitry Andric break;
757fa27ce4SDimitry Andric }
767fa27ce4SDimitry Andric InsPoint += *ReadOrErr;
777fa27ce4SDimitry Andric }
787fa27ce4SDimitry Andric if (DebugReply)
797fa27ce4SDimitry Andric dbgs() << OutputSpec.name() << ": "
807fa27ce4SDimitry Andric << tensorValueToString(OutputBuffer.data(), OutputSpec) << "\n";
817fa27ce4SDimitry Andric return OutputBuffer.data();
827fa27ce4SDimitry Andric }
83