Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Temporary Function Unit Tests #7

Merged
Merged 4 commits into the base branch from the feature branch
Aug 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions .github/workflows/_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,8 @@ jobs:
- name: Setup Java
uses: graalvm/setup-graalvm@v1
with:
java-version: '17'
java-version: '22'
distribution: 'graalvm-community'


- name: Setup SBT
run: |
Expand Down
5 changes: 5 additions & 0 deletions .github/workflows/_docker_publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,11 @@ jobs:
contents: read
packages: write
steps:
- name: Setup Java
uses: graalvm/setup-graalvm@v1
with:
java-version: '22'
distribution: 'graalvm-community'
- name: Checkout current branch
uses: actions/checkout@v4
with:
Expand Down
3 changes: 2 additions & 1 deletion backend/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ inThisBuild(
sbtdynver.DynVer.getGitDescribeOutput(d).mkVersion(versionFmt, fallbackVersion(d))
},
semanticdbEnabled := true,
semanticdbVersion := scalafixSemanticdb.revision
semanticdbVersion := scalafixSemanticdb.revision,
coverageEnabled := true
)
)

Expand Down
6 changes: 3 additions & 3 deletions backend/core/src/main/scala/chainless/ChainlessMain.scala
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ object ChainlessMain extends ResourceApp.Forever {

override def run(args: List[String]): Resource[F, Unit] =
for {
_ <- GraalSupport.verifyCompatibility[F]
_ <- GraalSupport.verifyCompatibility[F].toResource
given Client[F] <- EmberClientBuilder.default[F].withTimeout(5.seconds).build
given Files[F] = Files.forIO
(args, _) <- IO
Expand All @@ -45,8 +45,8 @@ object ChainlessMain extends ResourceApp.Forever {
functionsDb <- SqlFunctionsDb.make[F](sqliteConnection)
functionInvocationsDb <- SqlFunctionInvocationsDb.make[F](sqliteConnection)
blocksDb <- SqlBlocksDb.make[F](sqliteConnection)
blocksStore = new BlocksStore[F](Path(args.dataDir) / "objects" / "blocks")
functionsStore = new FunctionsStore[F](Path(args.dataDir) / "objects" / "functions")
blocksStore = new DirBlocksStore[F](Path(args.dataDir) / "objects" / "blocks")
functionsStore = new DirFunctionsStore[F](Path(args.dataDir) / "objects" / "functions")
runnerOperator = new RunnerOperator[F](
blocksDb,
blocksStore,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,15 +1,18 @@
package chainless.db

import cats.effect.Async
import cats.effect.implicits.*
import cats.implicits.*
import chainless.models.*
import fs2.io.file.{Files, Path}
import fs2.{Chunk, Stream}

import java.nio.charset.StandardCharsets

class BlocksStore[F[_]: Async](baseDir: Path):
/** Persistence abstraction for full block payloads.
  *
  * Implementations pair with [[BlockMeta]]/[[BlockWithChain]] from the models package;
  * the directory-backed implementation below stores one file per block, grouped by chain.
  */
trait BlocksStore[F[_]]:
  /** Persists the given block (together with its chain association). */
  def saveBlock(block: BlockWithChain): F[Unit]
  /** Loads the full block identified by the given metadata. */
  def getBlock(meta: BlockMeta): F[BlockWithChain]

class DirBlocksStore[F[_]: Async](baseDir: Path) extends BlocksStore[F]:
def saveBlock(block: BlockWithChain): F[Unit] =
Files[F].createDirectories(baseDir / block.meta.chain.name) >>
Stream(block.block)
Expand All @@ -28,16 +31,3 @@ class BlocksStore[F[_]: Async](baseDir: Path):
.map(io.circe.parser.parse)
.rethrow
.map(BlockWithChain(meta, _))

class FunctionsStore[F[_]: Async](baseDir: Path):
def get(id: String)(revision: Int): Stream[F, Byte] =
Files[F].readAll(baseDir / id / revision.toString)

def delete(id: String): F[Unit] =
Files[F].deleteRecursively(baseDir / id)

def save(id: String, revision: Int)(data: Stream[F, Byte]): F[Unit] =
Files[F].createDirectories(baseDir / id) >> data
.through(Files[F].writeAll(baseDir / id / revision.toString))
.compile
.drain
24 changes: 24 additions & 0 deletions backend/core/src/main/scala/chainless/db/FunctionsStore.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
package chainless.db

import cats.effect.Async
import cats.implicits.*
import fs2.Stream
import fs2.io.file.{Files, Path}

/** Persistence abstraction for versioned function code artifacts.
  *
  * A function is identified by its `id`; each upload of its code is a numbered `revision`.
  */
trait FunctionsStore[F[_]]:
  /** Streams the raw bytes of one revision of the function's code. */
  def get(id: String)(revision: Int): Stream[F, Byte]
  /** Removes the function and all of its revisions. */
  def delete(id: String): F[Unit]
  /** Persists `data` as the bytes of the given revision of the function. */
  def save(id: String, revision: Int)(data: Stream[F, Byte]): F[Unit]

/** [[FunctionsStore]] backed by the local filesystem.
  *
  * Layout: `baseDir / <function id> / <revision number>` — one directory per
  * function, one file per revision.
  */
class DirFunctionsStore[F[_]: Async](baseDir: Path) extends FunctionsStore[F]:

  // Directory holding every revision of a single function.
  private def functionDir(id: String): Path = baseDir / id

  // File containing the bytes of one specific revision.
  private def revisionFile(id: String, revision: Int): Path = functionDir(id) / revision.toString

  def get(id: String)(revision: Int): Stream[F, Byte] =
    Files[F].readAll(revisionFile(id, revision))

  def delete(id: String): F[Unit] =
    Files[F].deleteRecursively(functionDir(id))

  def save(id: String, revision: Int)(data: Stream[F, Byte]): F[Unit] =
    // Ensure the function's directory exists before streaming the revision bytes to disk.
    Files[F].createDirectories(functionDir(id)) >>
      data
        .through(Files[F].writeAll(revisionFile(id, revision)))
        .compile
        .drain
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ object Unzip {
def apply[F[_]: Async](chunkSize: Int = chunkSize): Pipe[F, Byte, (String, Boolean, Stream[F, Byte])] = {

def entry(zis: ZipInputStream): OptionT[F, (String, Boolean, Stream[F, Byte])] =
OptionT(Sync[F].blocking(Option(zis.getNextEntry()))).map { ze =>
OptionT(Sync[F].blocking(Option(zis.getNextEntry))).map { ze =>
(ze.getName, ze.isDirectory, io.readInputStream[F](Async[F].delay(zis), chunkSize, closeAfterUse = false))
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
package chainless.runner.temporary

import cats.effect.{Async, Resource, Sync}
import cats.implicits.*
import io.circe.syntax.*
import io.circe.{Json, JsonNumber, JsonObject}
import org.graalvm.polyglot.proxy.*
import org.graalvm.polyglot.{Context, Value}

import java.util.concurrent.Executors
import scala.annotation.tailrec
import scala.concurrent.ExecutionContext
import scala.jdk.CollectionConverters.*

/** Helpers for embedding GraalVM polyglot execution inside an effect type `F`.
  *
  * All interaction with a polyglot [[Context]] is funneled through the dedicated
  * single-thread ExecutionContext created alongside it in [[makeContext]].
  * NOTE(review): presumably because a polyglot Context must only be accessed from
  * one thread at a time — confirm against the GraalVM SDK documentation.
  */
object GraalSupport:

  extension (ec: ExecutionContext)
    /** Runs `fa` on this ExecutionContext (per `evalOn` semantics). */
    def eval[F[_]: Async, A](fa: F[A]): F[A] =
      Async[F].evalOn(fa, ec)

    /** Suspends the by-name thunk `a` and runs it on this ExecutionContext. */
    def evalSync[F[_]: Async, A](a: => A): F[A] =
      eval(Sync[F].delay(a))

  /** Allocates a single-thread executor plus a polyglot Context bound to it.
    *
    * The Context is built for both "js" and "python" sources with
    * `allowAllAccess(true)`. Both construction and closing of the Context run
    * on the dedicated executor; the executor itself is shut down on release.
    */
  def makeContext[F[_]: Async]: Resource[F, (ExecutionContext, Context)] =
    Resource
      .make(Async[F].delay(ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())))(ec =>
        Async[F].delay(ec.shutdownNow)
      )
      .flatMap(executor =>
        Resource
          .make(executor.eval(Sync[F].delay(Context.newBuilder("js", "python").allowAllAccess(true).build())))(
            context => executor.eval(Sync[F].blocking(context.close()))
          )
          .tupleLeft(executor)
      )

  /** Smoke-tests the Graal runtime: evaluates a tiny JS function against a JSON
    * argument and raises IllegalStateException unless the expected value (42)
    * round-trips through the Value <-> Json conversions below.
    */
  def verifyCompatibility[F[_]: Async]: F[Unit] =
    makeContext[F].use((ec, context) =>
      for {
        given Context <- context.pure[F]
        // Compilation and execution both happen on the context's dedicated thread.
        j <- ec.evalSync {
          val compiled = context.eval("js", compatibilityTestCode)
          val res = compiled.execute(Json.obj("bar" -> 42.asJson).asValue)
          val j = res.asJson
          j
        }
        bar <- Sync[F].fromEither(j.as[Int])
        _ <- Sync[F]
          .raiseUnless(bar == 42)(new IllegalStateException("Unexpected JS Result"))
      } yield ()
    )

  // Minimal JS probe: returns the `bar` field of its argument.
  private val compatibilityTestCode =
    """
      |(function(foo) {
      | return foo.bar;
      |})
      |""".stripMargin

  /** Json.Folder converting a circe Json tree into a polyglot Value within the
    * given Context (arrays and objects are wrapped as Graal proxies).
    */
  private def jsonFolder(using context: Context) =
    new Json.Folder[Value] {
      def onNull: Value = context.asValue(null)

      def onBoolean(value: Boolean): Value = context.asValue(value)

      // All JSON numbers are widened via Double; very large integers may lose precision.
      def onNumber(value: JsonNumber): Value = context.asValue(value.toDouble)

      def onString(value: String): Value = context.asValue(value)

      def onArray(value: Vector[Json]): Value = {
        val arr: Array[AnyRef] = new Array(value.size)
        value.zipWithIndex.foreach { case (j, idx) =>
          arr.update(idx, j.asValue)
        }
        val x = context.asValue(ProxyArray.fromArray(arr*))
        x
      }

      def onObject(value: JsonObject): Value = {
        val map = new java.util.HashMap[Object, Object](value.size)
        value.toMap.foreach { case (key, value) =>
          map.put(key, value.asValue)
        }
        context.asValue(ProxyHashMap.from(map))
      }
    }

  extension (value: Value)
    /** Converts a polyglot Value back into circe Json.
      *
      * Numbers pick the narrowest fitting JVM type (Int, then Long, then Float,
      * then Double). Throws MatchError when the value maps to none of the
      * supported shapes.
      */
    def asJson(using context: Context): Json =
      if (value.isNull) Json.Null
      else if (value.isBoolean) value.asBoolean().asJson
      else if (value.isNumber) {
        if (value.fitsInInt()) value.asInt().asJson
        else if (value.fitsInLong()) value.asLong().asJson
        else if (value.fitsInFloat()) value.asFloat().asJson
        else value.asDouble().asJson
      } else if (value.isString) value.asString().asJson
      else if (value.hasArrayElements) Json.arr(value.asScalaIterator.map(_.asJson).toSeq*)
      else if (value.hasHashEntries)
        Json.obj(
          value.asScalaMapIterator.map { case (k, v) =>
            k.asString() -> v.asJson
          }.toSeq*
        )
      else if (value.hasMembers) {
        Json.obj(
          value.getMemberKeys.asScala.map(key => key -> value.getMember(key).asJson).toSeq*
        )
      } else throw new MatchError(value)

    /** Iterates this Value: when it is already an iterator, unfolds it; otherwise
      * fetches its iterator first (the single recursive step that @tailrec checks).
      */
    @tailrec
    def asScalaIterator(using context: Context): Iterator[Value] =
      if (value.isIterator) {
        Iterator.unfold(value)(i => Option.when(i.hasIteratorNextElement)(i.getIteratorNextElement -> i))
      } else value.getIterator.asScalaIterator

    /** Iterates this Value's hash entries as (key, value) pairs; each entry is
      * read as a two-element array (element 0 = key, element 1 = value).
      */
    @tailrec
    def asScalaMapIterator(using context: Context): Iterator[(Value, Value)] =
      if (value.isIterator) {
        Iterator.unfold(value)(i =>
          Option.when(i.hasIteratorNextElement) {
            val arr = i.getIteratorNextElement

            (arr.getArrayElement(0) -> arr.getArrayElement(1)) -> i
          }
        )
      } else value.getHashEntriesIterator.asScalaMapIterator

  /** Converts circe Json into a polyglot Value in the given Context. */
  extension (json: Json) def asValue(using context: Context): Value = json.foldWith(jsonFolder)
127 changes: 2 additions & 125 deletions backend/core/src/main/scala/chainless/runner/temporary/Runner.scala
Original file line number Diff line number Diff line change
@@ -1,19 +1,12 @@
package chainless.runner.temporary

import cats.NonEmptyParallel
import cats.effect.implicits.*
import cats.effect.{Async, Resource, Sync}
import cats.implicits.*
import cats.NonEmptyParallel
import chainless.models.{*, given}
import io.circe.syntax.*
import io.circe.{Json, JsonNumber, JsonObject}
import org.graalvm.polyglot.proxy.*
import org.graalvm.polyglot.{Context, Value}

import java.util.concurrent.Executors
import scala.annotation.tailrec
import scala.concurrent.ExecutionContext
import scala.jdk.CollectionConverters.*
import org.graalvm.polyglot.Context

/** A running instance of a temporary function. Applies each block to the current function.
*/
Expand Down Expand Up @@ -61,119 +54,3 @@ object LocalGraalRunner:
)
.guarantee(Async[F].cede)
)

/** Helpers for embedding GraalVM polyglot execution inside an effect type `F`.
  * (Pre-refactor version removed by this diff; superseded by the standalone
  * GraalSupport.scala file added in the same PR.)
  *
  * All Context interaction is funneled through the dedicated single-thread
  * ExecutionContext created alongside it in [[makeContext]].
  * NOTE(review): presumably because a polyglot Context must only be accessed
  * from one thread at a time — confirm against the GraalVM SDK documentation.
  */
object GraalSupport:

  extension (ec: ExecutionContext)
    /** Runs `fa` on this ExecutionContext (per `evalOn` semantics). */
    def eval[F[_]: Async, A](fa: F[A]): F[A] =
      Async[F].evalOn(fa, ec)

    /** Suspends the by-name thunk `a` and runs it on this ExecutionContext. */
    def evalSync[F[_]: Async, A](a: => A): F[A] =
      eval(Sync[F].delay(a))

  /** Allocates a single-thread executor plus a polyglot Context bound to it.
    * Context construction and closing both run on the dedicated executor;
    * the executor is shut down on release.
    */
  def makeContext[F[_]: Async]: Resource[F, (ExecutionContext, Context)] =
    Resource
      .make(Async[F].delay(ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())))(ec =>
        Async[F].delay(ec.shutdownNow)
      )
      .flatMap(executor =>
        Resource
          .make(executor.eval(Sync[F].delay(Context.newBuilder("js", "python").allowAllAccess(true).build())))(
            context => executor.eval(Sync[F].blocking(context.close()))
          )
          .tupleLeft(executor)
      )

  /** Smoke-tests the Graal runtime as a Resource: evaluates a tiny JS function
    * and raises IllegalStateException unless the expected value (42) round-trips.
    */
  def verifyCompatibility[F[_]: Async]: Resource[F, Unit] =
    for {
      (ec, context) <- makeContext[F]
      given Context = context
      // Compilation and execution both happen on the context's dedicated thread.
      j <- ec.evalSync {
        val compiled = context.eval("js", compatibilityTestCode)
        val res = compiled.execute(Json.obj("bar" -> 42.asJson).asValue)
        val j = res.asJson
        j
      }.toResource
      bar <- Sync[F].fromEither(j.as[Int]).toResource
      _ <- Sync[F]
        .raiseUnless(bar == 42)(new IllegalStateException("Unexpected JS Result"))
        .toResource
    } yield ()

  // Minimal JS probe: returns the `bar` field of its argument.
  private val compatibilityTestCode =
    """
      |(function(foo) {
      | return foo.bar;
      |})
      |""".stripMargin

  /** Json.Folder converting a circe Json tree into a polyglot Value within the
    * given Context (arrays and objects are wrapped as Graal proxies).
    */
  private def jsonFolder(using context: Context) =
    new Json.Folder[Value] {
      def onNull: Value = context.asValue(null)

      def onBoolean(value: Boolean): Value = context.asValue(value)

      // All JSON numbers are widened via Double; very large integers may lose precision.
      def onNumber(value: JsonNumber): Value = context.asValue(value.toDouble)

      def onString(value: String): Value = context.asValue(value)

      def onArray(value: Vector[Json]): Value = {
        val arr: Array[AnyRef] = new Array(value.size)
        value.zipWithIndex.foreach { case (j, idx) =>
          arr.update(idx, j.asValue)
        }
        val x = context.asValue(ProxyArray.fromArray(arr*))
        x
      }

      def onObject(value: JsonObject): Value = {
        val map = new java.util.HashMap[Object, Object](value.size)
        value.toMap.foreach { case (key, value) =>
          map.put(key, value.asValue)
        }
        context.asValue(ProxyHashMap.from(map))
      }
    }

  extension (value: Value)
    /** Converts a polyglot Value back into circe Json. Numbers pick the
      * narrowest fitting JVM type; throws MatchError for unsupported shapes.
      */
    def asJson(using context: Context): Json =
      if (value.isNull) Json.Null
      else if (value.isBoolean) value.asBoolean().asJson
      else if (value.isNumber) {
        if (value.fitsInInt()) value.asInt().asJson
        else if (value.fitsInLong()) value.asLong().asJson
        else if (value.fitsInFloat()) value.asFloat().asJson
        else value.asDouble().asJson
      } else if (value.isString) value.asString().asJson
      else if (value.hasArrayElements) Json.arr(value.asScalaIterator.map(_.asJson).toSeq*)
      else if (value.hasHashEntries)
        Json.obj(
          value.asScalaMapIterator.map { case (k, v) =>
            k.asString() -> v.asJson
          }.toSeq*
        )
      else if (value.hasMembers) {
        Json.obj(
          value.getMemberKeys.asScala.map(key => key -> value.getMember(key).asJson).toSeq*
        )
      } else throw new MatchError(value)

    /** Iterates this Value, fetching its iterator first when needed. */
    @tailrec
    def asScalaIterator(using context: Context): Iterator[Value] =
      if (value.isIterator) {
        Iterator.unfold(value)(i => Option.when(i.hasIteratorNextElement)(i.getIteratorNextElement -> i))
      } else value.getIterator.asScalaIterator

    /** Iterates this Value's hash entries as (key, value) pairs; each entry is
      * read as a two-element array (element 0 = key, element 1 = value).
      */
    @tailrec
    def asScalaMapIterator(using context: Context): Iterator[(Value, Value)] =
      if (value.isIterator) {
        Iterator.unfold(value)(i =>
          Option.when(i.hasIteratorNextElement) {
            val arr = i.getIteratorNextElement

            (arr.getArrayElement(0) -> arr.getArrayElement(1)) -> i
          }
        )
      } else value.getHashEntriesIterator.asScalaMapIterator

  /** Converts circe Json into a polyglot Value in the given Context. */
  extension (json: Json) def asValue(using context: Context): Value = json.foldWith(jsonFolder)
Loading