diff --git a/.gitignore b/.gitignore index 4fbd384afe6..4fc59463837 100644 --- a/.gitignore +++ b/.gitignore @@ -108,4 +108,35 @@ Composer-Setup.exe .metals/ .bloop/ .ammonite/ -metals.sbt \ No newline at end of file +metals.sbt + + +# Ignore Helm-specific artifacts in texera-helmchart +core/scripts/texera-helmchart/*.tgz +core/scripts/texera-helmchart/charts/ +core/scripts/texera-helmchart/tmpcharts/ +core/scripts/texera-helmchart/*.lock +core/scripts/texera-helmchart/test-output/ +core/scripts/texera-helmchart/tests/test-*.yaml +core/scripts/texera-helmchart/*.helmignore + +# Ignore general OS-specific files and editor files in texera-helmchart +core/scripts/texera-helmchart/.DS_Store +core/scripts/texera-helmchart/Thumbs.db +core/scripts/texera-helmchart/*.swp +core/scripts/texera-helmchart/*.swo +core/scripts/texera-helmchart/*.bak +core/scripts/texera-helmchart/*~ +core/scripts/texera-helmchart/.idea/ +core/scripts/texera-helmchart/.vscode/ +core/scripts/texera-helmchart/*.log + +# Temporary files in texera-helmchart +core/scripts/texera-helmchart/*.tmp +core/scripts/texera-helmchart/*.out +core/scripts/texera-helmchart/*.cache +core/scripts/texera-helmchart/*.pyc +core/scripts/texera-helmchart/__pycache__/ + +# Build artifacts in texera-helmchart +core/scripts/texera-helmchart/dist/ \ No newline at end of file diff --git a/core/amber/computing-unit.dockerfile b/core/amber/computing-unit.dockerfile new file mode 100644 index 00000000000..4d9dcd68a19 --- /dev/null +++ b/core/amber/computing-unit.dockerfile @@ -0,0 +1,40 @@ +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# Set working directory +WORKDIR /core + +# Copy all projects under core to /core +COPY core/ . 
+ +RUN rm -rf amber/user-resources/* + +# Update system and install dependencies +RUN apt-get update && apt-get install -y \ + netcat \ + unzip \ + python3-pip \ + libpq-dev \ + && apt-get clean + +# Install Python dependencies +RUN pip3 install --upgrade pip setuptools wheel +RUN pip3 install python-lsp-server python-lsp-server[websockets] + +# Install requirements with a fallback for wordcloud +RUN pip3 install -r amber/requirements.txt +RUN pip3 install --no-cache-dir --find-links https://pypi.org/simple/ -r amber/operator-requirements.txt || \ + pip3 install --no-cache-dir wordcloud==1.9.2 + +# Additional setup +WORKDIR /core +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git + +# Build services +RUN scripts/build-services.sh + +# Set the default command +CMD ["scripts/workflow-computing-unit.sh"] + +# Expose the required port +EXPOSE 8085 \ No newline at end of file diff --git a/core/amber/src/main/resources/application.conf b/core/amber/src/main/resources/application.conf index acb296eecfe..06bae5fbbb9 100644 --- a/core/amber/src/main/resources/application.conf +++ b/core/amber/src/main/resources/application.conf @@ -18,7 +18,7 @@ network-buffering { } reconfiguration { - enable-transactional-reconfiguration = false + enable-transactional-reconfiguration = true } cache { @@ -27,10 +27,11 @@ cache { } user-sys { - enabled = false + enabled = true google { clientId = "" - smtp { + clientSecret = "" + smtp { gmail = "" password = "" } @@ -40,17 +41,17 @@ user-sys { exp-in-days = 30 # generate the secret again for each deployment using the following: # 'openssl rand -hex 16' or 'xxd -l16 -ps /dev/urandom' - 256-bit-secret = "8a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d" + 256-bit-secret = "5259e3eb6a58cb89b53790406c24c8f5" } } result-cleanup { - ttl-in-seconds = 86400 # time to live for a collection is 2 days - collection-check-interval-in-seconds = 86400 # 2 days + ttl-in-seconds = 3600 # time to live for a collection is 2 days + 
collection-check-interval-in-seconds = 3600 # 2 days } web-server { - workflow-state-cleanup-in-seconds = 30 + workflow-state-cleanup-in-seconds = 3600 python-console-buffer-size = 100 workflow-result-pulling-in-seconds = 3 clean-all-execution-results-on-server-start = false @@ -74,7 +75,7 @@ fault-tolerance { } schedule-generator { - enable-cost-based-schedule-generator = false + enable-cost-based-schedule-generator = true use-global-search = false use-top-down-search = false search-timeout-milliseconds = 1000 @@ -86,4 +87,4 @@ ai-assistant-server{ ai-service-key = "" # Put your Ai service url here (If you are using OpenAI, then the url should be "https://api.openai.com/v1") ai-service-url = "" -} \ No newline at end of file +} diff --git a/core/amber/src/main/resources/udf.conf b/core/amber/src/main/resources/udf.conf index d0452fdc594..82edfb235f5 100644 --- a/core/amber/src/main/resources/udf.conf +++ b/core/amber/src/main/resources/udf.conf @@ -1,6 +1,6 @@ python { # python3 executable path - path = "" + path = "/usr/bin/python3" log { streamHandler { diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberConfig.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberConfig.scala index 3c0f0b6b668..181b22db871 100644 --- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberConfig.scala +++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberConfig.scala @@ -2,9 +2,14 @@ package edu.uci.ics.amber.engine.common import akka.actor.Address import com.typesafe.config.{Config, ConfigFactory} +import org.eclipse.jgit.api.Git +import org.eclipse.jgit.revwalk.RevWalk +import org.eclipse.jgit.revwalk.filter.RevFilter import java.io.File import java.net.URI +import scala.io.Source +import scala.jdk.CollectionConverters.IteratorHasAsScala object AmberConfig { @@ -15,6 +20,35 @@ object AmberConfig { private var conf: Config = _ var masterNodeAddr: Address = Address("akka", "Amber", "localhost", 2552) + val 
lastDeployTimestamp:String = try { + // Read the timestamp string from the file + val source = Source.fromFile(Utils.amberHomePath.resolve("timestamp.txt").toString) + val timestamp = source.getLines().mkString + source.close() + // Print the timestamp + println(s"Timestamp read from file: $timestamp") + timestamp + } catch { + case e: Exception => + println(s"An error occurred: ${e.getMessage}") + "Timestamp unavailable" + } + + + + val latestCommitFromMaster = try { + // Read the timestamp string from the file + val source = Source.fromFile(Utils.amberHomePath.resolve("commit_title_before_merge.txt").toString) + val commitMsg = source.getLines().mkString + source.close() + // Print the timestamp + println(s"Commit message from file: $commitMsg") + commitMsg + } catch { + case e: Exception => + println(s"An error occurred: ${e.getMessage}") + "Commit message unavailable" + } // perform lazy reload private def getConfSource: Config = { diff --git a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala index e21a9eb0c09..1ad588ee4f6 100644 --- a/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala +++ b/core/amber/src/main/scala/edu/uci/ics/amber/engine/common/AmberRuntime.scala @@ -54,6 +54,7 @@ object AmberRuntime { akka.cluster.seed-nodes = [ "akka://Amber@$localIpAddress:2552" ] """) .withFallback(akkaConfig) + .resolve() AmberConfig.masterNodeAddr = createMasterAddress(localIpAddress) createAmberSystem(masterConfig) } @@ -75,6 +76,7 @@ object AmberRuntime { akka.cluster.seed-nodes = [ "akka://Amber@$addr:2552" ] """) .withFallback(akkaConfig) + .resolve() AmberConfig.masterNodeAddr = createMasterAddress(addr) createAmberSystem(workerConfig) } diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/hub/HubResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/hub/HubResource.scala 
index eceb3566804..d6868838ba1 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/hub/HubResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/hub/HubResource.scala @@ -3,15 +3,7 @@ package edu.uci.ics.texera.web.resource.dashboard.hub import edu.uci.ics.texera.dao.SqlServer import edu.uci.ics.texera.dao.jooq.generated.Tables._ import HubResource.{ - fetchDashboardDatasetsByDids, - fetchDashboardWorkflowsByWids, - getUserLCCount, - isLikedHelper, - recordLikeActivity, - recordUserActivity, - userRequest, - validateEntityType -} + fetchDashboardDatasetsByDids,fetchDashboardWorkflowsByWids, getUserLCCount, isLikedHelper, recordLikeActivity, recordUserActivity, userRequest, validateEntityType} import edu.uci.ics.texera.web.resource.dashboard.user.workflow.WorkflowResource.{ DashboardWorkflow, baseWorkflowSelect, @@ -31,6 +23,12 @@ import edu.uci.ics.texera.dao.jooq.generated.tables.DatasetUserAccess.DATASET_US import edu.uci.ics.texera.dao.jooq.generated.tables.User.USER import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.{Dataset, DatasetUserAccess} import edu.uci.ics.texera.web.resource.dashboard.DashboardResource.DashboardClickableFileEntry +import edu.uci.ics.texera.web.resource.dashboard.user.dataset.DatasetResource.{ + DashboardDataset, + baseDatasetSelect, + mapDashboardDataset +} +import edu.uci.ics.amber.engine.common.AmberConfig import edu.uci.ics.texera.web.resource.dashboard.user.dataset.DatasetResource.DashboardDataset object HubResource { @@ -305,6 +303,20 @@ class HubResource { .getInstance() .createDSLContext() + @GET + @Path("/git-describe") + @Produces(Array(MediaType.TEXT_PLAIN)) + def getGitHead: String = { + AmberConfig.latestCommitFromMaster + } + + @GET + @Path("/last-deploy") + @Produces(Array(MediaType.TEXT_PLAIN)) + def getLastDeploy: String = { + AmberConfig.lastDeployTimestamp + } + @GET @Path("/count") def getPublishedWorkflowCount(@QueryParam("entityType") entityType: 
String): Integer = { diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 43e81bd5a3d..fc15739f400 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -3,12 +3,18 @@ package edu.uci.ics.texera.web.resource.dashboard.user.workflow import edu.uci.ics.amber.core.storage.VFSURIFactory.decodeURI import edu.uci.ics.amber.core.storage.result.ExecutionResourcesMapping import edu.uci.ics.amber.core.storage.{DocumentFactory, VFSResourceType, VFSURIFactory} +import edu.uci.ics.amber.core.storage.{DocumentFactory, VFSURIFactory} import edu.uci.ics.amber.core.tuple.Tuple import edu.uci.ics.amber.core.virtualidentity._ import edu.uci.ics.amber.core.workflow.PortIdentity import edu.uci.ics.amber.engine.architecture.logreplay.{ReplayDestination, ReplayLogRecord} import edu.uci.ics.amber.engine.common.AmberConfig import edu.uci.ics.amber.engine.common.storage.SequentialRecordStorage +import edu.uci.ics.amber.core.virtualidentity.{ChannelMarkerIdentity, ExecutionIdentity, OperatorIdentity, WorkflowIdentity} +import edu.uci.ics.amber.core.workflow.PortIdentity +import edu.uci.ics.amber.engine.architecture.logreplay.{ReplayDestination, ReplayLogRecord} +import edu.uci.ics.amber.engine.common.AmberConfig +import edu.uci.ics.amber.engine.common.storage.SequentialRecordStorage import edu.uci.ics.texera.dao.SqlServer import edu.uci.ics.texera.dao.jooq.generated.Tables._ import edu.uci.ics.texera.dao.jooq.generated.tables.daos.WorkflowExecutionsDao @@ -17,6 +23,7 @@ import edu.uci.ics.texera.web.auth.SessionUser import 
edu.uci.ics.texera.web.resource.dashboard.user.workflow.WorkflowExecutionsResource._ import edu.uci.ics.texera.web.service.ExecutionsMetadataPersistService import io.dropwizard.auth.Auth +import org.jooq.types.ULong import java.net.URI import java.sql.Timestamp diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowResource.scala index ddb46d998ae..79495876713 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowResource.scala @@ -3,15 +3,11 @@ package edu.uci.ics.texera.web.resource.dashboard.user.workflow import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule import com.typesafe.scalalogging.LazyLogging +import edu.uci.ics.amber.engine.common.AmberConfig import edu.uci.ics.texera.dao.SqlServer import edu.uci.ics.texera.dao.jooq.generated.Tables._ import edu.uci.ics.texera.dao.jooq.generated.enums.PrivilegeEnum -import edu.uci.ics.texera.dao.jooq.generated.tables.daos.{ - WorkflowDao, - WorkflowOfProjectDao, - WorkflowOfUserDao, - WorkflowUserAccessDao -} +import edu.uci.ics.texera.dao.jooq.generated.tables.daos.{WorkflowDao, WorkflowOfProjectDao, WorkflowOfUserDao, WorkflowUserAccessDao} import edu.uci.ics.texera.dao.jooq.generated.tables.pojos._ import edu.uci.ics.texera.web.auth.SessionUser import edu.uci.ics.texera.web.resource.dashboard.hub.HubResource.recordCloneActivity @@ -51,6 +47,7 @@ object WorkflowResource { ) final private lazy val workflowOfProjectDao = new WorkflowOfProjectDao(context.configuration) + def getWorkflowName(wid: Integer): String = { val workflow = workflowDao.fetchOneByWid(wid) if (workflow == null) { diff --git a/core/amber/timestamp.txt b/core/amber/timestamp.txt new 
file mode 100644 index 00000000000..ff12768a1d6 --- /dev/null +++ b/core/amber/timestamp.txt @@ -0,0 +1 @@ +2025-02-11 17:03:49 diff --git a/core/amber/webserver.dockerfile b/core/amber/webserver.dockerfile new file mode 100644 index 00000000000..c249747caae --- /dev/null +++ b/core/amber/webserver.dockerfile @@ -0,0 +1,42 @@ +FROM node:18-alpine AS nodegui + +WORKDIR /gui +COPY core/gui/package.json core/gui/yarn.lock ./ +RUN apk add --no-cache python3 make g++ git +RUN corepack enable && corepack prepare yarn@4.5.1 --activate && yarn set version --yarn-path 4.5.1 +# Fake git-version.js during yarn install to prevent git from causing cache +# invalidation of dependencies +# Add tools required for building dependencies +RUN touch git-version.js && YARN_NODE_LINKER=node-modules yarn install + +COPY core/gui . +# Position of .git doesn't matter since it's only there for the revision hash +COPY .git ./.git +RUN apk add --no-cache git && \ + node git-version.js && \ + apk del git && \ + yarn run build + +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# copy all projects under core to /core +WORKDIR /core +COPY core/ . 
+ +RUN rm -rf amber/user-resources/* + +RUN apt-get update +RUN apt-get install -y netcat unzip python3-pip +#RUN pip3 install python-lsp-server python-lsp-server[websockets] +#RUN pip3 install -r amber/requirements.txt + +WORKDIR /core +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git +COPY --from=nodegui /gui/dist ./gui/dist + +RUN scripts/build-services.sh + +CMD ["scripts/server.sh"] + +EXPOSE 8080 diff --git a/core/build.sbt b/core/build.sbt index 74e873ce5a4..fad72025200 100644 --- a/core/build.sbt +++ b/core/build.sbt @@ -3,6 +3,14 @@ lazy val WorkflowCore = (project in file("workflow-core")) .dependsOn(DAO) .configs(Test) .dependsOn(DAO % "test->test") // test scope dependency +lazy val WorkflowComputingUnitManagingService = (project in file("workflow-computing-unit-managing-service")) + .dependsOn(WorkflowCore) + .settings( + dependencyOverrides ++= Seq( + // override it as io.dropwizard 4 require 2.16.1 or higher + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.17.0", +) + ) lazy val FileService = (project in file("file-service")) .dependsOn(WorkflowCore) .settings( @@ -47,7 +55,7 @@ lazy val WorkflowExecutionService = (project in file("amber")) // root project definition lazy val CoreProject = (project in file(".")) - .aggregate(DAO, WorkflowCore, FileService, WorkflowOperator, WorkflowCompilingService, WorkflowExecutionService) + .aggregate(DAO, WorkflowComputingUnitManagingService, WorkflowCore, FileService, WorkflowOperator, WorkflowCompilingService, WorkflowExecutionService) .settings( name := "core", version := "0.1.0", diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java index 02dbcb77bcf..693f4763078 100644 --- a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/Tables.java @@ -18,6 +18,7 @@ import 
edu.uci.ics.texera.dao.jooq.generated.tables.UserActivity; import edu.uci.ics.texera.dao.jooq.generated.tables.UserConfig; import edu.uci.ics.texera.dao.jooq.generated.tables.Workflow; +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowExecutions; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfProject; import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowOfUser; @@ -104,6 +105,11 @@ public class Tables { */ public static final Workflow WORKFLOW = Workflow.WORKFLOW; + /** + * The table texera_db.workflow_computing_unit. + */ + public static final WorkflowComputingUnit WORKFLOW_COMPUTING_UNIT = WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT; + /** * The table texera_db.workflow_executions. */ diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java new file mode 100644 index 00000000000..8cdb198aba5 --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/WorkflowComputingUnit.java @@ -0,0 +1,159 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables; + + +import edu.uci.ics.texera.dao.jooq.generated.Keys; +import edu.uci.ics.texera.dao.jooq.generated.TexeraDb; +import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowComputingUnitRecord; + +import java.sql.Timestamp; + +import org.jooq.Field; +import org.jooq.ForeignKey; +import org.jooq.Identity; +import org.jooq.Name; +import org.jooq.Record; +import org.jooq.Row5; +import org.jooq.Schema; +import org.jooq.Table; +import org.jooq.TableField; +import org.jooq.TableOptions; +import org.jooq.UniqueKey; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.jooq.impl.TableImpl; + + +/** + * This class is generated by jOOQ. 
+ */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnit extends TableImpl { + + private static final long serialVersionUID = 1L; + + /** + * The reference instance of texera_db.workflow_computing_unit + */ + public static final WorkflowComputingUnit WORKFLOW_COMPUTING_UNIT = new WorkflowComputingUnit(); + + /** + * The class holding records for this type + */ + @Override + public Class getRecordType() { + return WorkflowComputingUnitRecord.class; + } + + /** + * The column texera_db.workflow_computing_unit.uid. + */ + public final TableField UID = createField(DSL.name("uid"), SQLDataType.BIGINT.nullable(false), this, ""); + + /** + * The column texera_db.workflow_computing_unit.name. + */ + public final TableField NAME = createField(DSL.name("name"), SQLDataType.VARCHAR(128).nullable(false), this, ""); + + /** + * The column texera_db.workflow_computing_unit.cuid. + */ + public final TableField CUID = createField(DSL.name("cuid"), SQLDataType.INTEGER.nullable(false).identity(true), this, ""); + + /** + * The column texera_db.workflow_computing_unit.creation_time. + */ + public final TableField CREATION_TIME = createField(DSL.name("creation_time"), SQLDataType.TIMESTAMP(0).nullable(false).defaultValue(DSL.field("CURRENT_TIMESTAMP", SQLDataType.TIMESTAMP)), this, ""); + + /** + * The column texera_db.workflow_computing_unit.terminate_time. 
+ */ + public final TableField TERMINATE_TIME = createField(DSL.name("terminate_time"), SQLDataType.TIMESTAMP(0), this, ""); + + private WorkflowComputingUnit(Name alias, Table aliased) { + this(alias, aliased, null); + } + + private WorkflowComputingUnit(Name alias, Table aliased, Field[] parameters) { + super(alias, null, aliased, parameters, DSL.comment(""), TableOptions.table()); + } + + /** + * Create an aliased texera_db.workflow_computing_unit table + * reference + */ + public WorkflowComputingUnit(String alias) { + this(DSL.name(alias), WORKFLOW_COMPUTING_UNIT); + } + + /** + * Create an aliased texera_db.workflow_computing_unit table + * reference + */ + public WorkflowComputingUnit(Name alias) { + this(alias, WORKFLOW_COMPUTING_UNIT); + } + + /** + * Create a texera_db.workflow_computing_unit table reference + */ + public WorkflowComputingUnit() { + this(DSL.name("workflow_computing_unit"), null); + } + + public WorkflowComputingUnit(Table child, ForeignKey key) { + super(child, key, WORKFLOW_COMPUTING_UNIT); + } + + @Override + public Schema getSchema() { + return aliased() ? 
null : TexeraDb.TEXERA_DB; + } + + @Override + public Identity getIdentity() { + return (Identity) super.getIdentity(); + } + + @Override + public UniqueKey getPrimaryKey() { + return Keys.WORKFLOW_COMPUTING_UNIT_PKEY; + } + + @Override + public WorkflowComputingUnit as(String alias) { + return new WorkflowComputingUnit(DSL.name(alias), this); + } + + @Override + public WorkflowComputingUnit as(Name alias) { + return new WorkflowComputingUnit(alias, this); + } + + /** + * Rename this table + */ + @Override + public WorkflowComputingUnit rename(String name) { + return new WorkflowComputingUnit(DSL.name(name), null); + } + + /** + * Rename this table + */ + @Override + public WorkflowComputingUnit rename(Name name) { + return new WorkflowComputingUnit(name, null); + } + + // ------------------------------------------------------------------------- + // Row5 type methods + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java new file mode 100644 index 00000000000..4a9d1b701ec --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/daos/WorkflowComputingUnitDao.java @@ -0,0 +1,131 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.daos; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; +import edu.uci.ics.texera.dao.jooq.generated.tables.records.WorkflowComputingUnitRecord; + +import java.sql.Timestamp; +import java.util.List; +import java.util.Optional; + +import org.jooq.Configuration; +import org.jooq.impl.DAOImpl; + + +/** + * This class is generated by jOOQ. 
+ */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnitDao extends DAOImpl { + + /** + * Create a new WorkflowComputingUnitDao without any configuration + */ + public WorkflowComputingUnitDao() { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit.class); + } + + /** + * Create a new WorkflowComputingUnitDao with an attached configuration + */ + public WorkflowComputingUnitDao(Configuration configuration) { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT, edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit.class, configuration); + } + + @Override + public Integer getId(edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit object) { + return object.getCuid(); + } + + /** + * Fetch records that have uid BETWEEN lowerInclusive AND + * upperInclusive + */ + public List fetchRangeOfUid(Long lowerInclusive, Long upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have uid IN (values) + */ + public List fetchByUid(Long... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID, values); + } + + /** + * Fetch records that have name BETWEEN lowerInclusive AND + * upperInclusive + */ + public List fetchRangeOfName(String lowerInclusive, String upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have name IN (values) + */ + public List fetchByName(String... 
values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME, values); + } + + /** + * Fetch records that have cuid BETWEEN lowerInclusive AND + * upperInclusive + */ + public List fetchRangeOfCuid(Integer lowerInclusive, Integer upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have cuid IN (values) + */ + public List fetchByCuid(Integer... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, values); + } + + /** + * Fetch a unique record that has cuid = value + */ + public edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit fetchOneByCuid(Integer value) { + return fetchOne(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, value); + } + + /** + * Fetch a unique record that has cuid = value + */ + public Optional fetchOptionalByCuid(Integer value) { + return fetchOptional(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID, value); + } + + /** + * Fetch records that have creation_time BETWEEN lowerInclusive AND + * upperInclusive + */ + public List fetchRangeOfCreationTime(Timestamp lowerInclusive, Timestamp upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have creation_time IN (values) + */ + public List fetchByCreationTime(Timestamp... values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME, values); + } + + /** + * Fetch records that have terminate_time BETWEEN lowerInclusive AND + * upperInclusive + */ + public List fetchRangeOfTerminateTime(Timestamp lowerInclusive, Timestamp upperInclusive) { + return fetchRange(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME, lowerInclusive, upperInclusive); + } + + /** + * Fetch records that have terminate_time IN (values) + */ + public List fetchByTerminateTime(Timestamp... 
values) { + return fetch(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME, values); + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java new file mode 100644 index 00000000000..8eed828b33b --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/interfaces/IWorkflowComputingUnit.java @@ -0,0 +1,82 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.interfaces; + + +import java.io.Serializable; +import java.sql.Timestamp; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public interface IWorkflowComputingUnit extends Serializable { + + /** + * Setter for texera_db.workflow_computing_unit.uid. + */ + public void setUid(Long value); + + /** + * Getter for texera_db.workflow_computing_unit.uid. + */ + public Long getUid(); + + /** + * Setter for texera_db.workflow_computing_unit.name. + */ + public void setName(String value); + + /** + * Getter for texera_db.workflow_computing_unit.name. + */ + public String getName(); + + /** + * Setter for texera_db.workflow_computing_unit.cuid. + */ + public void setCuid(Integer value); + + /** + * Getter for texera_db.workflow_computing_unit.cuid. + */ + public Integer getCuid(); + + /** + * Setter for texera_db.workflow_computing_unit.creation_time. + */ + public void setCreationTime(Timestamp value); + + /** + * Getter for texera_db.workflow_computing_unit.creation_time. + */ + public Timestamp getCreationTime(); + + /** + * Setter for texera_db.workflow_computing_unit.terminate_time. + */ + public void setTerminateTime(Timestamp value); + + /** + * Getter for texera_db.workflow_computing_unit.terminate_time. 
+ */ + public Timestamp getTerminateTime(); + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + /** + * Load data from another generated Record/POJO implementing the common + * interface IWorkflowComputingUnit + */ + public void from(IWorkflowComputingUnit from); + + /** + * Copy data into another generated Record/POJO implementing the common + * interface IWorkflowComputingUnit + */ + public E into(E into); +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java new file mode 100644 index 00000000000..74ad01c4388 --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/pojos/WorkflowComputingUnit.java @@ -0,0 +1,162 @@ +/* + * This file is generated by jOOQ. + */ +package edu.uci.ics.texera.dao.jooq.generated.tables.pojos; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.interfaces.IWorkflowComputingUnit; + +import java.sql.Timestamp; + + +/** + * This class is generated by jOOQ. 
+ */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnit implements IWorkflowComputingUnit { + + private static final long serialVersionUID = 1L; + + private Long uid; + private String name; + private Integer cuid; + private Timestamp creationTime; + private Timestamp terminateTime; + + public WorkflowComputingUnit() {} + + public WorkflowComputingUnit(IWorkflowComputingUnit value) { + this.uid = value.getUid(); + this.name = value.getName(); + this.cuid = value.getCuid(); + this.creationTime = value.getCreationTime(); + this.terminateTime = value.getTerminateTime(); + } + + public WorkflowComputingUnit( + Long uid, + String name, + Integer cuid, + Timestamp creationTime, + Timestamp terminateTime + ) { + this.uid = uid; + this.name = name; + this.cuid = cuid; + this.creationTime = creationTime; + this.terminateTime = terminateTime; + } + + /** + * Getter for texera_db.workflow_computing_unit.uid. + */ + @Override + public Long getUid() { + return this.uid; + } + + /** + * Setter for texera_db.workflow_computing_unit.uid. + */ + @Override + public void setUid(Long uid) { + this.uid = uid; + } + + /** + * Getter for texera_db.workflow_computing_unit.name. + */ + @Override + public String getName() { + return this.name; + } + + /** + * Setter for texera_db.workflow_computing_unit.name. + */ + @Override + public void setName(String name) { + this.name = name; + } + + /** + * Getter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public Integer getCuid() { + return this.cuid; + } + + /** + * Setter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public void setCuid(Integer cuid) { + this.cuid = cuid; + } + + /** + * Getter for texera_db.workflow_computing_unit.creation_time. + */ + @Override + public Timestamp getCreationTime() { + return this.creationTime; + } + + /** + * Setter for texera_db.workflow_computing_unit.creation_time. 
+ */ + @Override + public void setCreationTime(Timestamp creationTime) { + this.creationTime = creationTime; + } + + /** + * Getter for texera_db.workflow_computing_unit.terminate_time. + */ + @Override + public Timestamp getTerminateTime() { + return this.terminateTime; + } + + /** + * Setter for texera_db.workflow_computing_unit.terminate_time. + */ + @Override + public void setTerminateTime(Timestamp terminateTime) { + this.terminateTime = terminateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("WorkflowComputingUnit ("); + + sb.append(uid); + sb.append(", ").append(name); + sb.append(", ").append(cuid); + sb.append(", ").append(creationTime); + sb.append(", ").append(terminateTime); + + sb.append(")"); + return sb.toString(); + } + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + @Override + public void from(IWorkflowComputingUnit from) { + setUid(from.getUid()); + setName(from.getName()); + setCuid(from.getCuid()); + setCreationTime(from.getCreationTime()); + setTerminateTime(from.getTerminateTime()); + } + + @Override + public E into(E into) { + into.from(this); + return into; + } +} diff --git a/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java new file mode 100644 index 00000000000..1263be6fe48 --- /dev/null +++ b/core/dao/src/main/scala/edu/uci/ics/texera/dao/jooq/generated/tables/records/WorkflowComputingUnitRecord.java @@ -0,0 +1,302 @@ +/* + * This file is generated by jOOQ. 
+ */ +package edu.uci.ics.texera.dao.jooq.generated.tables.records; + + +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit; +import edu.uci.ics.texera.dao.jooq.generated.tables.interfaces.IWorkflowComputingUnit; + +import java.sql.Timestamp; + +import org.jooq.Field; +import org.jooq.Record1; +import org.jooq.Record5; +import org.jooq.Row5; +import org.jooq.impl.UpdatableRecordImpl; + + +/** + * This class is generated by jOOQ. + */ +@SuppressWarnings({ "all", "unchecked", "rawtypes" }) +public class WorkflowComputingUnitRecord extends UpdatableRecordImpl implements Record5, IWorkflowComputingUnit { + + private static final long serialVersionUID = 1L; + + /** + * Setter for texera_db.workflow_computing_unit.uid. + */ + @Override + public void setUid(Long value) { + set(0, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.uid. + */ + @Override + public Long getUid() { + return (Long) get(0); + } + + /** + * Setter for texera_db.workflow_computing_unit.name. + */ + @Override + public void setName(String value) { + set(1, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.name. + */ + @Override + public String getName() { + return (String) get(1); + } + + /** + * Setter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public void setCuid(Integer value) { + set(2, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.cuid. + */ + @Override + public Integer getCuid() { + return (Integer) get(2); + } + + /** + * Setter for texera_db.workflow_computing_unit.creation_time. + */ + @Override + public void setCreationTime(Timestamp value) { + set(3, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.creation_time. + */ + @Override + public Timestamp getCreationTime() { + return (Timestamp) get(3); + } + + /** + * Setter for texera_db.workflow_computing_unit.terminate_time. 
+ */ + @Override + public void setTerminateTime(Timestamp value) { + set(4, value); + } + + /** + * Getter for texera_db.workflow_computing_unit.terminate_time. + */ + @Override + public Timestamp getTerminateTime() { + return (Timestamp) get(4); + } + + // ------------------------------------------------------------------------- + // Primary key information + // ------------------------------------------------------------------------- + + @Override + public Record1 key() { + return (Record1) super.key(); + } + + // ------------------------------------------------------------------------- + // Record5 type implementation + // ------------------------------------------------------------------------- + + @Override + public Row5 fieldsRow() { + return (Row5) super.fieldsRow(); + } + + @Override + public Row5 valuesRow() { + return (Row5) super.valuesRow(); + } + + @Override + public Field field1() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.UID; + } + + @Override + public Field field2() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.NAME; + } + + @Override + public Field field3() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CUID; + } + + @Override + public Field field4() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.CREATION_TIME; + } + + @Override + public Field field5() { + return WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME; + } + + @Override + public Long component1() { + return getUid(); + } + + @Override + public String component2() { + return getName(); + } + + @Override + public Integer component3() { + return getCuid(); + } + + @Override + public Timestamp component4() { + return getCreationTime(); + } + + @Override + public Timestamp component5() { + return getTerminateTime(); + } + + @Override + public Long value1() { + return getUid(); + } + + @Override + public String value2() { + return getName(); + } + + @Override + public Integer value3() { + return getCuid(); + } + + @Override + public 
Timestamp value4() { + return getCreationTime(); + } + + @Override + public Timestamp value5() { + return getTerminateTime(); + } + + @Override + public WorkflowComputingUnitRecord value1(Long value) { + setUid(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value2(String value) { + setName(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value3(Integer value) { + setCuid(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value4(Timestamp value) { + setCreationTime(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord value5(Timestamp value) { + setTerminateTime(value); + return this; + } + + @Override + public WorkflowComputingUnitRecord values(Long value1, String value2, Integer value3, Timestamp value4, Timestamp value5) { + value1(value1); + value2(value2); + value3(value3); + value4(value4); + value5(value5); + return this; + } + + // ------------------------------------------------------------------------- + // FROM and INTO + // ------------------------------------------------------------------------- + + @Override + public void from(IWorkflowComputingUnit from) { + setUid(from.getUid()); + setName(from.getName()); + setCuid(from.getCuid()); + setCreationTime(from.getCreationTime()); + setTerminateTime(from.getTerminateTime()); + } + + @Override + public E into(E into) { + into.from(this); + return into; + } + + // ------------------------------------------------------------------------- + // Constructors + // ------------------------------------------------------------------------- + + /** + * Create a detached WorkflowComputingUnitRecord + */ + public WorkflowComputingUnitRecord() { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT); + } + + /** + * Create a detached, initialised WorkflowComputingUnitRecord + */ + public WorkflowComputingUnitRecord(Long uid, String name, Integer cuid, Timestamp creationTime, Timestamp terminateTime) { + 
super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT); + + setUid(uid); + setName(name); + setCuid(cuid); + setCreationTime(creationTime); + setTerminateTime(terminateTime); + } + + /** + * Create a detached, initialised WorkflowComputingUnitRecord + */ + public WorkflowComputingUnitRecord(edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit value) { + super(WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT); + + if (value != null) { + setUid(value.getUid()); + setName(value.getName()); + setCuid(value.getCuid()); + setCreationTime(value.getCreationTime()); + setTerminateTime(value.getTerminateTime()); + } + } +} diff --git a/core/gui/proxy.config.json b/core/gui/proxy.config.json index a1f6d95ff54..487b292d4f9 100755 --- a/core/gui/proxy.config.json +++ b/core/gui/proxy.config.json @@ -4,6 +4,11 @@ "secure": false, "changeOrigin": true }, + "/api/computing-unit": { + "target": "http://localhost:9090", + "secure": false, + "changeOrigin": true + }, "/api/dataset": { "target": "http://localhost:9092", "secure": false, @@ -17,18 +22,18 @@ "/api": { "target": "http://localhost:8080", "secure": false, - "changeOrigin": false + "changeOrigin": true }, "/wsapi": { "target": "http://localhost:8085", "secure": false, - "changeOrigin": false, + "changeOrigin": true, "ws": true }, "/rtc": { "target": "http://localhost:1234", "ws": true, "secure": false, - "changeOrigin": false + "changeOrigin": true } } diff --git a/core/gui/src/app/app.module.ts b/core/gui/src/app/app.module.ts index dd5a27f80ee..a19a98e8c24 100644 --- a/core/gui/src/app/app.module.ts +++ b/core/gui/src/app/app.module.ts @@ -146,6 +146,7 @@ import { UserDatasetStagedObjectsListComponent } from "./dashboard/component/use import { NzEmptyModule } from "ng-zorro-antd/empty"; import { NzDividerModule } from "ng-zorro-antd/divider"; import { NzProgressModule } from "ng-zorro-antd/progress"; +import { ComputingUnitSelectionComponent } from 
"./workspace/component/power-button/computing-unit-selection.component"; registerLocaleData(en); @@ -235,6 +236,7 @@ registerLocaleData(en); BreakpointConditionInputComponent, CodeDebuggerComponent, HubSearchResultComponent, + ComputingUnitSelectionComponent, ], imports: [ BrowserModule, diff --git a/core/gui/src/app/hub/component/landing-page/landing-page.component.html b/core/gui/src/app/hub/component/landing-page/landing-page.component.html index 7be30b6b9c4..cf11ec385a3 100644 --- a/core/gui/src/app/hub/component/landing-page/landing-page.component.html +++ b/core/gui/src/app/hub/component/landing-page/landing-page.component.html @@ -1,7 +1,11 @@
-

Texera Hub

+

Texera Testing

+

Last Deploy: {{lastDeployTime}}

+

Merged Head Commit: + {{ commitPrefix }}{{ issueNumber }}{{ commitSuffix }} +

Join our community to explore public workflows, collaborate with others, and enhance your data analytics capabilities. Access diff --git a/core/gui/src/app/hub/component/landing-page/landing-page.component.ts b/core/gui/src/app/hub/component/landing-page/landing-page.component.ts index 04a62750688..caad8ce4bbb 100644 --- a/core/gui/src/app/hub/component/landing-page/landing-page.component.ts +++ b/core/gui/src/app/hub/component/landing-page/landing-page.component.ts @@ -19,6 +19,11 @@ import { UserService } from "../../../common/service/user/user.service"; styleUrls: ["./landing-page.component.scss"], }) export class LandingPageComponent implements OnInit { + public deploymentCommit: string = 'Commit unavailable'; + public commitPrefix: string = ''; + public issueNumber: string = ''; + public commitSuffix: string = ''; + public lastDeployTime: string = ""; public isLogin = this.userService.isLogin(); public currentUid = this.userService.getCurrentUser()?.uid; public workflowCount: number = 0; @@ -55,6 +60,20 @@ export class LandingPageComponent implements OnInit { } catch (error) { console.error("Failed to load top loved workflows:", error); } + this.hubService.getGitCommit().subscribe(commit =>{ + // Split the text into parts + this.deploymentCommit = commit + const match = this.deploymentCommit.match(/(.*)(#\d+)(.*)/); + if (match) { + this.commitPrefix = match[1]; + this.issueNumber = match[2]; + this.commitSuffix = match[3]; + } else { + this.commitPrefix = this.deploymentCommit; + } + + }) + this.hubService.getLastDeploy().subscribe(deployTime => this.lastDeployTime = deployTime) } getWorkflowCount(): void { diff --git a/core/gui/src/app/hub/service/hub.service.ts b/core/gui/src/app/hub/service/hub.service.ts index 609f2d6bd26..19291b48262 100644 --- a/core/gui/src/app/hub/service/hub.service.ts +++ b/core/gui/src/app/hub/service/hub.service.ts @@ -14,6 +14,14 @@ export class HubService { constructor(private http: HttpClient) {} + public getGitCommit(): 
Observable { + return this.http.get(`${this.BASE_URL}/git-describe`, { responseType: 'text' as 'json'}); + } + + public getLastDeploy(): Observable { + return this.http.get(`${this.BASE_URL}/last-deploy`, { responseType:'text' as 'json' }); + } + public getCount(entityType: string): Observable { return this.http.get(`${this.BASE_URL}/count`, { params: { entityType: entityType }, diff --git a/core/gui/src/app/workspace/component/menu/menu.component.html b/core/gui/src/app/workspace/component/menu/menu.component.html index 55c7b241464..33db9d917b8 100644 --- a/core/gui/src/app/workspace/component/menu/menu.component.html +++ b/core/gui/src/app/workspace/component/menu/menu.component.html @@ -307,7 +307,12 @@ nzType="ellipsis"> + + + +

+ diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss new file mode 100644 index 00000000000..ca50bc7dce3 --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.scss @@ -0,0 +1,33 @@ +nz-select { + width: 300px; +} + +nz-option { + height: 100%; +} + +.computing-unit-option { + display: flex; + align-items: center; + justify-content: space-between; + width: 100%; +} + +.unit-details { + display: flex; + align-items: center; + gap: 8px; + flex-grow: 1; +} + +.terminate-box { + width: 16px; /* Size of the red box */ + height: 16px; + background-color: red; + border-radius: 2px; /* Optional, if you want slightly rounded corners */ + cursor: pointer; /* Pointer cursor to indicate it's clickable */ +} + +.terminate-box:hover { + opacity: 0.8; /* Slight opacity on hover for visual feedback */ +} diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts new file mode 100644 index 00000000000..cd03abe8d8f --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.spec.ts @@ -0,0 +1,31 @@ +import { ComponentFixture, TestBed } from "@angular/core/testing"; +import { HttpClientTestingModule } from "@angular/common/http/testing"; +import { ComputingUnitSelectionComponent } from "./computing-unit-selection.component"; +import { NzButtonModule } from "ng-zorro-antd/button"; +import { CommonModule } from "@angular/common"; +import { NzIconModule } from "ng-zorro-antd/icon"; + +describe("PowerButtonComponent", () => { + let component: ComputingUnitSelectionComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: 
[ComputingUnitSelectionComponent], // Declare if not standalone + imports: [ + HttpClientTestingModule, // Use TestingModule instead of HttpClientModule + CommonModule, + NzButtonModule, + NzIconModule, + ], + }).compileComponents(); + + fixture = TestBed.createComponent(ComputingUnitSelectionComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it("should create", () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts new file mode 100644 index 00000000000..a01f9f362b8 --- /dev/null +++ b/core/gui/src/app/workspace/component/power-button/computing-unit-selection.component.ts @@ -0,0 +1,147 @@ +import { Component, Input, OnInit } from "@angular/core"; +import { interval } from "rxjs"; +import { switchMap } from "rxjs/operators"; +import { WorkflowComputingUnitManagingService } from "../../service/workflow-computing-unit/workflow-computing-unit-managing.service"; +import { DashboardWorkflowComputingUnit } from "../../types/workflow-computing-unit"; +import { NotificationService } from "../../../common/service/notification/notification.service"; +import { WorkflowWebsocketService } from "../../service/workflow-websocket/workflow-websocket.service"; +import { WorkflowActionService } from "../../service/workflow-graph/model/workflow-action.service"; +import { isDefined } from "../../../common/util/predicate"; +import { UntilDestroy, untilDestroyed } from "@ngneat/until-destroy"; +import {ActivatedRoute} from "@angular/router"; + +@UntilDestroy() +@Component({ + selector: "texera-computing-unit-selection", + templateUrl: "./computing-unit-selection.component.html", + styleUrls: ["./computing-unit-selection.component.scss"], +}) +export class ComputingUnitSelectionComponent implements OnInit { + @Input() + workflowId: number | undefined; + + 
selectedComputingUnit: DashboardWorkflowComputingUnit | null = null; + computingUnits: DashboardWorkflowComputingUnit[] = []; + private readonly REFRESH_INTERVAL_MS = 2000; + + constructor( + private computingUnitService: WorkflowComputingUnitManagingService, + private notificationService: NotificationService, + private workflowWebsocketService: WorkflowWebsocketService, + private route: ActivatedRoute, + ) {} + + ngOnInit(): void { + this.computingUnitService.listComputingUnits().subscribe({ + next: (units: DashboardWorkflowComputingUnit[]) => { + let firstRunningUnit = units.find(unit => unit.status === "Running") + if(firstRunningUnit){ + this.selectedComputingUnit = firstRunningUnit; + this.onComputingUnitChange(firstRunningUnit); + } + this.updateComputingUnits(units); + this.refreshComputingUnits(); + }, + error: (err: unknown) => console.error("Failed to fetch computing units:", err), + }) + } + + /** + * Periodically refresh the list of computing units. + */ + private refreshComputingUnits(): void { + interval(this.REFRESH_INTERVAL_MS) + .pipe( + switchMap(() => this.computingUnitService.listComputingUnits()), + untilDestroyed(this) + ) + .subscribe({ + next: (units: DashboardWorkflowComputingUnit[]) => this.updateComputingUnits(units), + error: (err: unknown) => console.error("Failed to fetch computing units:", err), + }); + } + + /** + * Update the computing units list, maintaining object references for the same CUID. + */ + private updateComputingUnits(newUnits: DashboardWorkflowComputingUnit[]): void { + const unitMap = new Map(this.computingUnits.map(unit => [unit.computingUnit.cuid, unit])); + + this.computingUnits = newUnits.map(newUnit => + unitMap.has(newUnit.computingUnit.cuid) + ? 
Object.assign(unitMap.get(newUnit.computingUnit.cuid)!, newUnit) + : newUnit + ); + + // If selected computing unit is removed, deselect it + if ( + this.selectedComputingUnit && + !this.computingUnits.some(unit => unit.computingUnit.cuid === this.selectedComputingUnit!.computingUnit.cuid) + ) { + this.selectedComputingUnit = null; + } + } + + /** + * Start a new computing unit. + */ + startComputingUnit(): void { + const computeUnitName = `Compute for Workflow ${this.workflowId}`; + this.computingUnitService + .createComputingUnit(computeUnitName) + .pipe(untilDestroyed(this)) + .subscribe({ + next: (unit: DashboardWorkflowComputingUnit) => { + this.notificationService.success("Successfully created the new compute unit"); + this.refreshComputingUnits(); + }, + error: (err: unknown) => this.notificationService.error("Failed to start computing unit"), + }); + } + + /** + * Terminate a computing unit. + * @param cuid The CUID of the unit to terminate. + */ + terminateComputingUnit(cuid: number): void { + const uri = this.computingUnits.find(unit => unit.computingUnit.cuid === cuid)?.uri; + + if (!uri) { + this.notificationService.error("Invalid computing unit URI."); + return; + } + + this.computingUnitService + .terminateComputingUnit(uri) + .pipe(untilDestroyed(this)) + .subscribe({ + next: (res: Response) => { + this.notificationService.success(`Terminated computing unit with URI: ${uri}`); + this.refreshComputingUnits(); + }, + error: (err: unknown) => this.notificationService.error("Failed to terminate computing unit"), + }); + } + + /** + * Called whenever the selected computing unit changes. 
+ */ + onComputingUnitChange(newSelection: DashboardWorkflowComputingUnit | null): void { + console.log("Selected computing unit changed to:", newSelection); + const wid = this.route.snapshot.params.id; + if (newSelection && isDefined(wid)) { + console.log(`Selected Unit URI: ${newSelection.uri}`); + this.workflowWebsocketService.closeWebsocket() + this.workflowWebsocketService.openWebsocket(wid, undefined, newSelection.computingUnit.cuid); + } else { + console.log("No valid selection, keep the current websocket."); + } + } + + /** + * Get badge color based on the unit's status. + */ + getBadgeColor(status: string): string { + return status === "Running" ? "green" : "yellow"; + } +} diff --git a/core/gui/src/app/workspace/component/workspace.component.ts b/core/gui/src/app/workspace/component/workspace.component.ts index 50f944f2c53..ec7962d535b 100644 --- a/core/gui/src/app/workspace/component/workspace.component.ts +++ b/core/gui/src/app/workspace/component/workspace.component.ts @@ -53,7 +53,6 @@ export class WorkspaceComponent implements AfterViewInit, OnInit, OnDestroy { private undoRedoService: UndoRedoService, private workflowCacheService: WorkflowCacheService, private workflowPersistService: WorkflowPersistService, - private workflowWebsocketService: WorkflowWebsocketService, private workflowActionService: WorkflowActionService, private location: Location, private route: ActivatedRoute, @@ -107,11 +106,8 @@ export class WorkspaceComponent implements AfterViewInit, OnInit, OnDestroy { this.workflowActionService.resetAsNewWorkflow(); if (this.userSystemEnabled) { - this.registerReEstablishWebsocketUponWIdChange(); + this.onWIDChange(); this.updateViewCount(); - } else { - let wid = this.route.snapshot.params.id ?? 
0; - this.workflowWebsocketService.openWebsocket(wid); } this.registerLoadOperatorMetadata(); @@ -126,7 +122,6 @@ export class WorkspaceComponent implements AfterViewInit, OnInit, OnDestroy { } this.codeEditorViewRef.clear(); - this.workflowWebsocketService.closeWebsocket(); this.workflowActionService.clearWorkflow(); } @@ -267,7 +262,7 @@ export class WorkspaceComponent implements AfterViewInit, OnInit, OnDestroy { }); } - registerReEstablishWebsocketUponWIdChange() { + onWIDChange() { this.workflowActionService .workflowMetaDataChanged() .pipe( @@ -278,7 +273,6 @@ export class WorkspaceComponent implements AfterViewInit, OnInit, OnDestroy { .pipe(untilDestroyed(this)) .subscribe((metadata: WorkflowMetadata) => { this.writeAccess = !metadata.readonly; - this.workflowWebsocketService.reopenWebsocket(metadata.wid as number); }); } diff --git a/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts new file mode 100644 index 00000000000..0188813bbf6 --- /dev/null +++ b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-computing-unit-managing.service.ts @@ -0,0 +1,53 @@ +import { Injectable } from "@angular/core"; +import { HttpClient } from "@angular/common/http"; +import { Observable } from "rxjs"; +import { AppSettings } from "../../../common/app-setting"; +import { DashboardWorkflowComputingUnit } from "../../types/workflow-computing-unit"; + +export const COMPUTING_UNIT_BASE_URL = "computing-unit"; +export const COMPUTING_UNIT_CREATE_URL = `${COMPUTING_UNIT_BASE_URL}/create`; +export const COMPUTING_UNIT_TERMINATE_URL = `${COMPUTING_UNIT_BASE_URL}/terminate`; +export const COMPUTING_UNIT_LIST_URL = `${COMPUTING_UNIT_BASE_URL}`; + +@Injectable({ + providedIn: "root", +}) +export class WorkflowComputingUnitManagingService { + constructor(private http: HttpClient) {} + + /** + * Create a new 
workflow computing unit (pod). + * @param name The name for the computing unit. + * @param unitType + * @returns An Observable of the created WorkflowComputingUnit. + */ + public createComputingUnit(name: string, unitType: string = "k8s_pod"): Observable { + const body = { name, unitType }; + + return this.http.post( + `${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_CREATE_URL}`, + body + ); + } + + /** + * Terminate a computing unit (pod) by its URI. + * @returns An Observable of the server response. + * @param uri + */ + public terminateComputingUnit(uri: string): Observable { + const body = { uri: uri, name: "dummy" }; + + return this.http.post(`${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_TERMINATE_URL}`, body); + } + + /** + * List all active computing units. + * @returns An Observable of a list of WorkflowComputingUnit. + */ + public listComputingUnits(): Observable { + return this.http.get( + `${AppSettings.getApiEndpoint()}/${COMPUTING_UNIT_LIST_URL}` + ); + } +} diff --git a/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-pod-brain.service.spec.ts b/core/gui/src/app/workspace/service/workflow-computing-unit/workflow-pod-brain.service.spec.ts new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts b/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts index e128684153c..8b1aed1b022 100644 --- a/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts +++ b/core/gui/src/app/workspace/service/workflow-websocket/workflow-websocket.service.ts @@ -13,6 +13,7 @@ import { delayWhen, filter, map, retryWhen, tap } from "rxjs/operators"; import { environment } from "../../../../environments/environment"; import { AuthService } from "../../../common/service/user/auth.service"; import { getWebsocketUrl } from "src/app/common/util/url"; +import { isDefined } from "../../../common/util/predicate"; 
export const WS_HEARTBEAT_INTERVAL_MS = 10000; export const WS_RECONNECT_INTERVAL_MS = 3000; @@ -21,11 +22,12 @@ export const WS_RECONNECT_INTERVAL_MS = 3000; providedIn: "root", }) export class WorkflowWebsocketService { - private static readonly TEXERA_WEBSOCKET_ENDPOINT = "wsapi/workflow-websocket"; + private static readonly TEXERA_WEBSOCKET_ENDPOINT = environment.envoyUrl; public isConnected: boolean = false; public numWorkers: number = -1; private connectedWid: number = 0; + private connectedCuid?: number; private websocket?: WebSocketSubject; private wsWithReconnectSubscription?: Subscription; @@ -63,16 +65,25 @@ export class WorkflowWebsocketService { public closeWebsocket() { this.wsWithReconnectSubscription?.unsubscribe(); this.websocket?.complete(); + this.connectedCuid = undefined; } - public openWebsocket(wId: number) { + public openWebsocket(wId: number, uId?: number, cuId?: number) { + if (uId == undefined) { + console.log(`uId is ${uId}, defaulting to uId = 1`); + uId = 1; + } const websocketUrl = getWebsocketUrl(WorkflowWebsocketService.TEXERA_WEBSOCKET_ENDPOINT, "") + "?wid=" + wId + + "&uid=" + + uId + + (isDefined(cuId) ? `&cuid=${cuId}` : "") + (environment.userSystemEnabled && AuthService.getAccessToken() !== null ? 
"&access-token=" + AuthService.getAccessToken() : ""); + console.log("websocketUrl", websocketUrl); this.websocket = webSocket(websocketUrl); // setup reconnection logic const wsWithReconnect = this.websocket.pipe( @@ -101,15 +112,7 @@ export class WorkflowWebsocketService { } this.isConnected = true; this.connectedWid = wId; + if (isDefined(cuId)) this.connectedCuid = cuId; }); } - - public reopenWebsocket(wId: number) { - if (this.isConnected && this.connectedWid === wId) { - // prevent reconnections - return; - } - this.closeWebsocket(); - this.openWebsocket(wId); - } } diff --git a/core/gui/src/app/workspace/types/workflow-computing-unit.ts b/core/gui/src/app/workspace/types/workflow-computing-unit.ts new file mode 100644 index 00000000000..238e8013606 --- /dev/null +++ b/core/gui/src/app/workspace/types/workflow-computing-unit.ts @@ -0,0 +1,13 @@ +export interface WorkflowComputingUnit { + cuid: number; + uid: number; + name: string; + creationTime: number; + terminateTime: number | undefined; +} + +export interface DashboardWorkflowComputingUnit { + computingUnit: WorkflowComputingUnit; + uri: string; + status: string; +} diff --git a/core/gui/src/environments/environment.default.ts b/core/gui/src/environments/environment.default.ts index 8d708f011a6..55bec7e7aab 100644 --- a/core/gui/src/environments/environment.default.ts +++ b/core/gui/src/environments/environment.default.ts @@ -12,6 +12,11 @@ export const defaultEnvironment = { */ apiUrl: "api", + /** + * root API of the envoy proxy + */ + envoyUrl: "wsapi/workflow-websocket", + /** * whether export execution result is supported */ @@ -26,7 +31,12 @@ export const defaultEnvironment = { /** * whether user system is enabled */ - userSystemEnabled: false, + userSystemEnabled: true, + + /** + * whether workflow computing unit system is enabled + */ + computingUnitSystemEnabled: true, /** * whether selecting files from datasets instead of the local file system. 
@@ -56,7 +66,7 @@ export const defaultEnvironment = { /** * whether workflow executions tracking feature is enabled */ - workflowExecutionsTrackingEnabled: false, + workflowExecutionsTrackingEnabled: true, /** * whether linkBreakpoint is supported diff --git a/core/gui/src/environments/environment.prod.ts b/core/gui/src/environments/environment.prod.ts index f29d7802cd2..5be1db1b798 100644 --- a/core/gui/src/environments/environment.prod.ts +++ b/core/gui/src/environments/environment.prod.ts @@ -3,4 +3,23 @@ import { AppEnv, defaultEnvironment } from "./environment.default"; export const environment: AppEnv = { ...defaultEnvironment, production: true, + + userSystemEnabled: true, + + localLogin: true, + + inviteOnly: true, + + exportExecutionResultEnabled: true, + + userPresetEnabled: true, + + productionSharedEditingServer: true, + + asyncRenderingEnabled: true, + + workflowExecutionsTrackingEnabled: true, + + singleFileUploadMaximumSizeMB: 20 + }; diff --git a/core/scripts/brain.sh b/core/scripts/brain.sh new file mode 100755 index 00000000000..3c925d740e5 --- /dev/null +++ b/core/scripts/brain.sh @@ -0,0 +1,2 @@ +cd workflow-pod-brain +target/universal/workflow-pod-brain-0.1.0-SNAPSHOT/bin/workflow-pod-brain server src/main/resources/config.yaml diff --git a/core/scripts/build-brain.sh b/core/scripts/build-brain.sh new file mode 100755 index 00000000000..7236ce1672b --- /dev/null +++ b/core/scripts/build-brain.sh @@ -0,0 +1,4 @@ +cd workflow-pod-brain +sbt clean dist +unzip target/universal/workflow-pod-brain-0.1.0-SNAPSHOT.zip -d target/universal/ +rm target/universal/workflow-pod-brain-0.1.0-SNAPSHOT.zip diff --git a/core/scripts/build-docker-image/webserver.sh b/core/scripts/build-docker-image/webserver.sh new file mode 100755 index 00000000000..e27be7d0c09 --- /dev/null +++ b/core/scripts/build-docker-image/webserver.sh @@ -0,0 +1 @@ + docker build -t shengqun/texera-webserver:dev-usersys --no-cache -f core/amber/webserver.dockerfile . 
diff --git a/core/scripts/build-docker-image/workflow-compiling-service.sh b/core/scripts/build-docker-image/workflow-compiling-service.sh new file mode 100755 index 00000000000..869c36b9566 --- /dev/null +++ b/core/scripts/build-docker-image/workflow-compiling-service.sh @@ -0,0 +1 @@ + docker build -t shengqun/texera-workflow-compiling-service:dev --no-cache -f core/workflow-compiling-service/workflow-compiling-service.dockerfile . diff --git a/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh b/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh new file mode 100755 index 00000000000..d09508b642b --- /dev/null +++ b/core/scripts/build-docker-image/workflow-computing-unit-managing-service.sh @@ -0,0 +1 @@ + docker build -t shengqun/texera-workflow-computing-unit-managing-service:dev --no-cache -f core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile . diff --git a/core/scripts/build-docker-image/workflow-computing-unit.sh b/core/scripts/build-docker-image/workflow-computing-unit.sh new file mode 100755 index 00000000000..7283fbea43e --- /dev/null +++ b/core/scripts/build-docker-image/workflow-computing-unit.sh @@ -0,0 +1 @@ + docker build -t shengqun/texera-workflow-computing-unit:dev --no-cache -f core/amber/computing-unit.dockerfile . diff --git a/core/scripts/build-services.sh b/core/scripts/build-services.sh index b18585ea795..eecb6271faf 100755 --- a/core/scripts/build-services.sh +++ b/core/scripts/build-services.sh @@ -5,5 +5,8 @@ rm workflow-compiling-service/target/universal/workflow-compiling-service-0.1.0. 
unzip file-service/target/universal/file-service-0.1.0.zip -d target/ rm file-service/target/universal/file-service-0.1.0.zip +unzip workflow-computing-unit-managing-service/target/universal/workflow-computing-unit-managing-service-0.1.0.zip -d target/ +rm workflow-computing-unit-managing-service/target/universal/workflow-computing-unit-managing-service-0.1.0.zip + unzip amber/target/universal/texera-0.1-SNAPSHOT.zip -d amber/target/ -rm amber/target/universal/texera-0.1-SNAPSHOT.zip +rm amber/target/universal/texera-0.1-SNAPSHOT.zip \ No newline at end of file diff --git a/core/scripts/deploy-brain.sh b/core/scripts/deploy-brain.sh new file mode 100755 index 00000000000..63682b67674 --- /dev/null +++ b/core/scripts/deploy-brain.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +# Start brain.sh in the background +bash scripts/brain.sh & +BRAIN_PID=$! # Store the PID of server.sh + +# Trap SIGTERM and SIGINT and forward them as SIGTERM to the background processes +trap 'kill -TERM $BRAIN_PID; wait $BRAIN_PID' SIGTERM SIGINT + +# Wait for brain.sh to complete +wait -n diff --git a/core/scripts/deploy-docker-trap.sh b/core/scripts/deploy-docker-trap.sh new file mode 100644 index 00000000000..c0d328c8963 --- /dev/null +++ b/core/scripts/deploy-docker-trap.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash + +# Start server.sh in the background +bash scripts/server.sh & +SERVER_PID=$! # Store the PID of server.sh + +# Wait for server.sh to start by sleeping for a brief period (adjust as needed) +sleep 5 + +# Check if server.sh is still running; if not, exit with an error +if ! ps -p $SERVER_PID > /dev/null; then + >&2 echo 'server.sh failed to start.' + exit 1 +fi + +# Start worker.sh in the background +bash scripts/worker.sh & +WORKER_PID=$! 
# Store the PID of worker.sh + +# Trap SIGTERM and SIGINT and forward them as SIGTERM to the background processes +trap 'kill -TERM $SERVER_PID; kill -TERM $WORKER_PID; wait $SERVER_PID $WORKER_PID' SIGTERM SIGINT + +# Wait for one of server.sh or worker.sh to complete +wait -n + +# # Send SIGTERM to both processes in case one exits early +# kill -TERM $SERVER_PID $WORKER_PID + +# # Give them time to shut down gracefully +# wait $SERVER_PID $WORKER_PID \ No newline at end of file diff --git a/core/scripts/server.sh b/core/scripts/server.sh index 61201d64e06..282b8cc5689 100755 --- a/core/scripts/server.sh +++ b/core/scripts/server.sh @@ -1,2 +1,3 @@ +#!/bin/bash cd amber target/texera-0.1-SNAPSHOT/bin/texera-web-application \ No newline at end of file diff --git a/core/scripts/texera-helmchart/Chart.yaml b/core/scripts/texera-helmchart/Chart.yaml new file mode 100644 index 00000000000..97b110bd8f2 --- /dev/null +++ b/core/scripts/texera-helmchart/Chart.yaml @@ -0,0 +1,34 @@ +apiVersion: v2 +name: texera-helm +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. 
Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. +appVersion: "1.16.0" + + +dependencies: + - name: ingress-nginx + version: 4.11.3 + repository: https://kubernetes.github.io/ingress-nginx + + - name: postgresql + version: "16.4.7" + repository: https://charts.bitnami.com/bitnami diff --git a/core/scripts/texera-helmchart/files/texera_ddl.sql b/core/scripts/texera-helmchart/files/texera_ddl.sql new file mode 100644 index 00000000000..a56362139aa --- /dev/null +++ b/core/scripts/texera-helmchart/files/texera_ddl.sql @@ -0,0 +1,342 @@ +-- ============================================ +-- 1. Drop and recreate the database (psql only) +-- Remove if you already created texera_db +-- ============================================ +\c postgres +DROP DATABASE IF EXISTS texera_db; +CREATE DATABASE texera_db; + +-- ============================================ +-- 2. Connect to the new database (psql only) +-- ============================================ +\c texera_db + +CREATE SCHEMA IF NOT EXISTS texera_db; +SET search_path TO texera_db, public; + +-- ============================================ +-- 3. 
Drop all tables if they exist +-- (CASCADE handles FK dependencies) +-- ============================================ +DROP TABLE IF EXISTS operator_executions CASCADE; +DROP TABLE IF EXISTS operator_port_executions CASCADE; +DROP TABLE IF EXISTS workflow_user_access CASCADE; +DROP TABLE IF EXISTS workflow_of_user CASCADE; +DROP TABLE IF EXISTS user_config CASCADE; +DROP TABLE IF EXISTS "user" CASCADE; +DROP TABLE IF EXISTS workflow CASCADE; +DROP TABLE IF EXISTS workflow_version CASCADE; +DROP TABLE IF EXISTS project CASCADE; +DROP TABLE IF EXISTS workflow_of_project CASCADE; +DROP TABLE IF EXISTS workflow_executions CASCADE; +DROP TABLE IF EXISTS dataset CASCADE; +DROP TABLE IF EXISTS dataset_user_access CASCADE; +DROP TABLE IF EXISTS dataset_version CASCADE; +DROP TABLE IF EXISTS public_project CASCADE; +DROP TABLE IF EXISTS project_user_access CASCADE; +DROP TABLE IF EXISTS workflow_user_likes CASCADE; +DROP TABLE IF EXISTS workflow_user_clones CASCADE; +DROP TABLE IF EXISTS workflow_view_count CASCADE; +DROP TABLE IF EXISTS workflow_user_activity CASCADE; +DROP TABLE IF EXISTS user_activity CASCADE; + +-- ============================================ +-- 4. Create PostgreSQL enum types +-- to mimic MySQL ENUM fields +-- ============================================ + +DROP TYPE IF EXISTS user_role_enum CASCADE; +DROP TYPE IF EXISTS privilege_enum CASCADE; + +CREATE TYPE user_role_enum AS ENUM ('INACTIVE', 'RESTRICTED', 'REGULAR', 'ADMIN'); +CREATE TYPE privilege_enum AS ENUM ('NONE', 'READ', 'WRITE'); + +-- ============================================ +-- 5. 
Create tables +-- ============================================ + +-- "user" table +CREATE TABLE IF NOT EXISTS "user" +( + uid SERIAL PRIMARY KEY, + name VARCHAR(256) NOT NULL, + email VARCHAR(256) UNIQUE, + password VARCHAR(256), + google_id VARCHAR(256) UNIQUE, + role user_role_enum NOT NULL DEFAULT 'INACTIVE', + google_avatar VARCHAR(100), + -- check that either password or google_id is not null + CONSTRAINT ck_nulltest CHECK ((password IS NOT NULL) OR (google_id IS NOT NULL)) + ); + +-- user_config +CREATE TABLE IF NOT EXISTS user_config +( + uid INT NOT NULL, + key VARCHAR(256) NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (uid, key), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE + ); + +-- workflow +CREATE TABLE IF NOT EXISTS workflow +( + wid SERIAL PRIMARY KEY, + name VARCHAR(128) NOT NULL, + description VARCHAR(500), + content TEXT NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + last_modified_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + is_public BOOLEAN NOT NULL DEFAULT false + ); + +-- workflow_of_user +CREATE TABLE IF NOT EXISTS workflow_of_user +( + uid INT NOT NULL, + wid INT NOT NULL, + PRIMARY KEY (uid, wid), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- workflow_user_access +CREATE TABLE IF NOT EXISTS workflow_user_access +( + uid INT NOT NULL, + wid INT NOT NULL, + privilege privilege_enum NOT NULL DEFAULT 'NONE', + PRIMARY KEY (uid, wid), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- workflow_version +CREATE TABLE IF NOT EXISTS workflow_version +( + vid SERIAL PRIMARY KEY, + wid INT NOT NULL, + content TEXT NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- project +CREATE TABLE IF NOT EXISTS project +( + pid SERIAL PRIMARY KEY, 
+ name VARCHAR(128) NOT NULL, + description VARCHAR(10000), + owner_id INT NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + color VARCHAR(6), + UNIQUE (owner_id, name), + FOREIGN KEY (owner_id) REFERENCES "user"(uid) ON DELETE CASCADE + ); + +-- workflow_of_project +CREATE TABLE IF NOT EXISTS workflow_of_project +( + wid INT NOT NULL, + pid INT NOT NULL, + PRIMARY KEY (wid, pid), + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE, + FOREIGN KEY (pid) REFERENCES project(pid) ON DELETE CASCADE + ); + +-- project_user_access +CREATE TABLE IF NOT EXISTS project_user_access +( + uid INT NOT NULL, + pid INT NOT NULL, + privilege privilege_enum NOT NULL DEFAULT 'NONE', + PRIMARY KEY (uid, pid), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE, + FOREIGN KEY (pid) REFERENCES project(pid) ON DELETE CASCADE + ); + +-- workflow_executions +CREATE TABLE IF NOT EXISTS workflow_executions +( + eid SERIAL PRIMARY KEY, + vid INT NOT NULL, + uid INT NOT NULL, + status SMALLINT NOT NULL DEFAULT 1, + result TEXT, + starting_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + last_update_time TIMESTAMP, + bookmarked BOOLEAN DEFAULT FALSE, + name VARCHAR(128) NOT NULL DEFAULT 'Untitled Execution', + environment_version VARCHAR(128) NOT NULL, + log_location TEXT, + runtime_stats_uri TEXT, + FOREIGN KEY (vid) REFERENCES workflow_version(vid) ON DELETE CASCADE, + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE + ); + +-- public_project +CREATE TABLE IF NOT EXISTS public_project +( + pid INT PRIMARY KEY, + uid INT, + FOREIGN KEY (pid) REFERENCES project(pid) ON DELETE CASCADE + -- Note: MySQL schema doesn't define a foreign key for uid + ); + +-- dataset +CREATE TABLE IF NOT EXISTS dataset +( + did SERIAL PRIMARY KEY, + owner_uid INT NOT NULL, + name VARCHAR(128) NOT NULL, + is_public BOOLEAN NOT NULL DEFAULT TRUE, + description VARCHAR(512) NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY 
(owner_uid) REFERENCES "user"(uid) ON DELETE CASCADE + ); + +-- dataset_user_access +CREATE TABLE IF NOT EXISTS dataset_user_access +( + did INT NOT NULL, + uid INT NOT NULL, + privilege privilege_enum NOT NULL DEFAULT 'NONE', + PRIMARY KEY (did, uid), + FOREIGN KEY (did) REFERENCES dataset(did) ON DELETE CASCADE, + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE + ); + +-- dataset_version +CREATE TABLE IF NOT EXISTS dataset_version +( + dvid SERIAL PRIMARY KEY, + did INT NOT NULL, + creator_uid INT NOT NULL, + name VARCHAR(128) NOT NULL, + version_hash VARCHAR(64) NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (did) REFERENCES dataset(did) ON DELETE CASCADE + ); + +-- operator_executions (modified to match MySQL: no separate primary key; added console_messages_uri) +CREATE TABLE IF NOT EXISTS operator_executions +( + workflow_execution_id INT NOT NULL, + operator_id VARCHAR(100) NOT NULL, + console_messages_uri TEXT, + UNIQUE (workflow_execution_id, operator_id), + FOREIGN KEY (workflow_execution_id) REFERENCES workflow_executions(eid) ON DELETE CASCADE + ); + +-- operator_port_executions (replaces the old operator_runtime_statistics) +CREATE TABLE IF NOT EXISTS operator_port_executions +( + workflow_execution_id INT NOT NULL, + operator_id VARCHAR(100) NOT NULL, + port_id INT NOT NULL, + result_uri TEXT, + UNIQUE (workflow_execution_id, operator_id, port_id), + FOREIGN KEY (workflow_execution_id) REFERENCES workflow_executions(eid) ON DELETE CASCADE + ); + +-- workflow_user_likes +CREATE TABLE IF NOT EXISTS workflow_user_likes +( + uid INT NOT NULL, + wid INT NOT NULL, + PRIMARY KEY (uid, wid), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON DELETE CASCADE, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- workflow_user_clones +CREATE TABLE IF NOT EXISTS workflow_user_clones +( + uid INT NOT NULL, + wid INT NOT NULL, + PRIMARY KEY (uid, wid), + FOREIGN KEY (uid) REFERENCES "user"(uid) ON 
DELETE CASCADE, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- workflow_view_count +CREATE TABLE IF NOT EXISTS workflow_view_count +( + wid INT NOT NULL PRIMARY KEY, + view_count INT NOT NULL DEFAULT 0, + FOREIGN KEY (wid) REFERENCES workflow(wid) ON DELETE CASCADE + ); + +-- Drop old workflow_user_activity (if any), replace with user_activity +-- user_activity +CREATE TABLE IF NOT EXISTS user_activity +( + uid INT NOT NULL DEFAULT 0, + id INT NOT NULL, + type VARCHAR(15) NOT NULL, + ip VARCHAR(15), + activate VARCHAR(10) NOT NULL, + activity_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + ); + +-- ============================================ +-- 6. Approximate FULLTEXT indexes with GIN +-- to mirror MySQL FULLTEXT +-- ============================================ +-- (Requires "pg_trgm" extension for more advanced usage.) + +CREATE INDEX idx_workflow_name_description_content + ON workflow + USING GIN ( + to_tsvector('english', + COALESCE(name, '') || ' ' || + COALESCE(description, '') || ' ' || + COALESCE(content, '') + ) + ); + +CREATE INDEX idx_user_name + ON "user" + USING GIN ( + to_tsvector('english', + COALESCE(name, '') + ) + ); + +CREATE INDEX idx_user_project_name_description + ON project + USING GIN ( + to_tsvector('english', + COALESCE(name, '') || ' ' || + COALESCE(description, '') + ) + ); + +CREATE INDEX idx_dataset_name_description + ON dataset + USING GIN ( + to_tsvector('english', + COALESCE(name, '') || ' ' || + COALESCE(description, '') + ) + ); + +CREATE INDEX idx_dataset_version_name + ON dataset_version + USING GIN ( + to_tsvector('english', + COALESCE(name, '') + ) + ); + +CREATE TABLE IF NOT EXISTS workflow_computing_unit ( + uid BIGINT NOT NULL, + name VARCHAR(128) NOT NULL, + cuid INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + terminate_time TIMESTAMP DEFAULT NULL + ); + +-- Done! 
+-- You now have a "texera_db" database schema matching the MySQL version as closely as possible. \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-config.yaml b/core/scripts/texera-helmchart/templates/envoy-config.yaml new file mode 100644 index 00000000000..938c0a6e113 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-config.yaml @@ -0,0 +1,74 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: envoy-config + namespace: {{ .Values.namespace }} +data: + envoy.yaml: | + static_resources: + listeners: + - name: listener_0 + address: + socket_address: + address: 0.0.0.0 + port_value: 10000 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + stat_prefix: ingress_http + upgrade_configs: + - upgrade_type: websocket + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: ["*"] + routes: + - match: + prefix: "/wsapi" + route: + cluster: dynamic_service + prefix_rewrite: "/wsapi" + http_filters: + - name: envoy.filters.http.lua + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + inline_code: | + function envoy_on_request(request_handle) + local uri = request_handle:headers():get(":path") + local cuid = string.match(uri, "cuid=(%d+)") + if cuid then + local new_host = "computing-unit-" .. cuid .. 
".workflow-computing-unit-svc.texera-dev.svc.cluster.local:8085" + request_handle:headers():replace(":authority", new_host) + end + end + - name: envoy.filters.http.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.dynamic_forward_proxy.v3.FilterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + + access_log: + - name: envoy.access_loggers.stdout + typed_config: + "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + + clusters: + - name: dynamic_service + connect_timeout: 0.25s + lb_policy: CLUSTER_PROVIDED + cluster_type: + name: envoy.clusters.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.clusters.dynamic_forward_proxy.v3.ClusterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-deployment.yaml b/core/scripts/texera-helmchart/templates/envoy-deployment.yaml new file mode 100644 index 00000000000..a5c5aedd9d4 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-envoy-deployment + namespace: {{ .Values.namespace }} +spec: + replicas: {{ .Values.envoy.replicas | default 1 }} + selector: + matchLabels: + app: envoy + template: + metadata: + labels: + app: envoy + spec: + containers: + - name: envoy + image: "{{ .Values.envoy.image.repository }}:{{ .Values.envoy.image.tag }}" + ports: + - containerPort: {{ .Values.envoy.port}} + volumeMounts: + - name: envoy-config + mountPath: /etc/envoy + readOnly: true + args: + - "-c" + - "/etc/envoy/envoy.yaml" # Specify the path to the configuration file + # - 
"--log-level debug" # Set level of logging + volumes: + - name: envoy-config + configMap: + name: envoy-config # Reference the ConfigMap created earlier \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/envoy-service.yaml b/core/scripts/texera-helmchart/templates/envoy-service.yaml new file mode 100644 index 00000000000..b4c23b9f186 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/envoy-service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: envoy-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.envoy.service.type }} + selector: + app: envoy + ports: + - protocol: TCP + port: {{ .Values.envoy.service.port }} + targetPort: {{ .Values.envoy.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.envoy.service.type "NodePort" }} + nodePort: {{ .Values.envoy.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/ingress.yaml b/core/scripts/texera-helmchart/templates/ingress.yaml new file mode 100644 index 00000000000..af7ca6c210b --- /dev/null +++ b/core/scripts/texera-helmchart/templates/ingress.yaml @@ -0,0 +1,30 @@ +{{- if .Values.ingressPaths.enabled }} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ .Release.Name }}-ingress + namespace: {{ .Values.namespace }} + annotations: + cert-manager.io/issuer: {{ .Values.certManager.issuer }} + # nginx.ingress.kubernetes.io/rewrite-target: /$1 + nginx.ingress.kubernetes.io/use-regex: "true" +spec: + ingressClassName: rke2-ingress-nginx + rules: + - host: {{ .Values.ingressPaths.hostname }} + http: + paths: + {{- range .Values.ingressPaths.paths }} + - path: {{ .path }} + pathType: Prefix + backend: + service: + name: {{ .serviceName }} + port: + number: {{ .servicePort }} + {{- end }} + tls: + - hosts: + - {{ .Values.ingressPaths.hostname }} + secretName: {{ .Values.tlsSecretName }} +{{- end }} diff --git 
a/core/scripts/texera-helmchart/templates/webserver-deployment.yaml b/core/scripts/texera-helmchart/templates/webserver-deployment.yaml new file mode 100644 index 00000000000..c9c2b1193e9 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-deployment.yaml @@ -0,0 +1,37 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.webserver.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} +spec: + replicas: {{ .Values.webserver.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} + spec: + volumes: + - name: {{ .Values.webserver.name }}-pv + persistentVolumeClaim: + claimName: {{ .Values.webserver.name }}-pvc + containers: + - name: {{ .Values.webserver.name }} + image: {{ .Values.webserver.imageName }} + imagePullPolicy: {{ .Values.webserver.imagePullPolicy }} + ports: + - containerPort: {{ .Values.webserver.service.port }} + env: + - name: JDBC_URL + value: "jdbc:postgresql://{{ .Release.Name }}-postgresql.{{ .Values.namespace }}.svc.cluster.local:5432/{{ .Values.global.postgresql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.global.postgresql.auth.user }} + - name: JDBC_PASSWORD + value: {{ .Values.global.postgresql.auth.password }} + volumeMounts: + - name: {{ .Values.webserver.name }}-pv + mountPath: {{ .Values.webserver.volume.mountPath }} # Specify the mount path in the container diff --git a/core/scripts/texera-helmchart/templates/webserver-pvc.yaml b/core/scripts/texera-helmchart/templates/webserver-pvc.yaml new file mode 100644 index 00000000000..7a652ee533b --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-pvc.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ .Values.webserver.name }}-pvc + namespace: {{ .Values.namespace }} +spec: + 
accessModes: {{ toYaml .Values.webserver.volume.accessModes | nindent 2 }} + resources: + requests: + storage: {{ .Values.webserver.volume.size }} + storageClassName: {{ .Values.webserver.volume.storageClassName }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/webserver-service.yaml b/core/scripts/texera-helmchart/templates/webserver-service.yaml new file mode 100644 index 00000000000..c8e6a98ab23 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/webserver-service.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.webserver.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.webserver.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} + ports: + - name: api-port + protocol: TCP + port: {{ .Values.webserver.service.port }} + targetPort: {{ .Values.webserver.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.webserver.service.type "NodePort" }} + nodePort: {{ .Values.webserver.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml b/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml new file mode 100644 index 00000000000..6103a5109b3 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-compiling-service-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} +spec: + replicas: {{ .Values.workflowCompilingService.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + 
spec: + containers: + - name: {{ .Values.workflowCompilingService.name }} + image: {{ .Values.workflowCompilingService.imageName }} + imagePullPolicy: {{ .Values.workflowCompilingService.imagePullPolicy }} + ports: + - containerPort: {{ .Values.workflowCompilingService.service.port }} + env: + - name: JDBC_URL + value: "jdbc:postgresql://{{ .Release.Name }}-postgresql.{{ .Values.namespace }}.svc.cluster.local:5432/{{ .Values.global.postgresql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.global.postgresql.auth.user }} + - name: JDBC_PASSWORD + value: {{ .Values.global.postgresql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} diff --git a/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml b/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml new file mode 100644 index 00000000000..c377418299b --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-compiling-service-service.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowCompilingService.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.workflowCompilingService.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.workflowCompilingService.name }} + ports: + - name: api-port + protocol: TCP + port: {{ .Values.workflowCompilingService.service.port }} + targetPort: {{ .Values.workflowCompilingService.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq .Values.workflowCompilingService.service.type "NodePort" }} + nodePort: {{ .Values.workflowCompilingService.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml 
b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml new file mode 100644 index 00000000000..18e45bb20db --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-deployment.yaml @@ -0,0 +1,33 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} +spec: + replicas: {{ .Values.workflowComputingUnitManager.numOfPods }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + spec: + serviceAccountName: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + containers: + - name: {{ .Values.workflowComputingUnitManager.name }} + image: {{ .Values.workflowComputingUnitManager.imageName }} + imagePullPolicy: {{ .Values.workflowComputingUnitManager.imagePullPolicy }} + ports: + - containerPort: {{ .Values.workflowComputingUnitManager.service.port }} + env: + - name: JDBC_URL + value: "jdbc:postgresql://{{ .Release.Name }}-postgresql.{{ .Values.namespace }}.svc.cluster.local:5432/{{ .Values.global.postgresql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.global.postgresql.auth.user }} + - name: JDBC_PASSWORD + value: {{ .Values.global.postgresql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml new file mode 100644 index 00000000000..28a4530690a --- /dev/null +++ 
b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service-account.yaml @@ -0,0 +1,34 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + namespace: {{ .Values.namespace }} + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: {{ .Values.workflowComputingUnitManager.name }} + namespace: {{ .Values.namespace }} +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: ["get", "list", "watch", "create", "delete"] + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: ["get", "list", "watch", "create", "delete"] + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ .Values.workflowComputingUnitManager.name }}-binding + namespace: {{ .Values.namespace }} +subjects: + - kind: ServiceAccount + name: {{ .Values.workflowComputingUnitManager.serviceAccountName }} + namespace: {{ .Values.namespace }} +roleRef: + kind: Role + name: {{ .Values.workflowComputingUnitManager.name }} + apiGroup: rbac.authorization.k8s.io diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml new file mode 100644 index 00000000000..f90b1ee4e95 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-unit-manager-service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowComputingUnitManager.name }}-svc + namespace: {{ .Values.namespace }} +spec: + type: {{ .Values.workflowComputingUnitManager.service.type }} + selector: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnitManager.name }} + ports: + - protocol: TCP + port: {{ .Values.workflowComputingUnitManager.service.port }} + targetPort: {{ .Values.workflowComputingUnitManager.service.port }} + # if service type is set to NodePort, include nodePort attribute + {{- if eq 
.Values.workflowComputingUnitManager.service.type "NodePort" }} + nodePort: {{ .Values.workflowComputingUnitManager.service.nodePort }} + {{- end }} \ No newline at end of file diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml new file mode 100644 index 00000000000..71345583299 --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-units-namespace.yaml @@ -0,0 +1,6 @@ +{{- if .Values.createNamespaces }} +apiVersion: v1 +kind: Namespace +metadata: + name: {{ .Values.namespace }} +{{- end }} diff --git a/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml b/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml new file mode 100644 index 00000000000..df8c54731af --- /dev/null +++ b/core/scripts/texera-helmchart/templates/workflow-computing-units-service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.workflowComputingUnitPool.name }}-svc + namespace: {{ .Values.namespace }} +spec: + clusterIP: None + selector: + type: computing-unit # TODO: consider change this + ports: + - protocol: TCP + port: {{ .Values.workflowComputingUnitPool.service.port }} + targetPort: {{ .Values.workflowComputingUnitPool.service.targetPort }} diff --git a/core/scripts/texera-helmchart/values.yaml b/core/scripts/texera-helmchart/values.yaml new file mode 100644 index 00000000000..3b213546f11 --- /dev/null +++ b/core/scripts/texera-helmchart/values.yaml @@ -0,0 +1,145 @@ +namespace: texera-dev + +createNamespaces: false + +# Part 1: the configuration of mongodb and mysql +# We use external charts from bitnami, and pass the values stated below to them. 
+# For bitnami/mysql, check out: https://artifacthub.io/packages/helm/bitnami/mysql +# For bitnami/mongodb, check out: https://artifacthub.io/packages/helm/bitnami/mongodb +# +#mysqlPv: +# name: mysql-pv +# size: 10Gi +# path: /mnt/data/mysql # the path on the node that contains the pv +# node: minikube # the node that contains the pv +global: + namespaceOverride: texera-dev + enabled: true + storageClass: "nfs-client" + defaultStorageClass: "nfs-client" + postgresql: + auth: + database: "texera_db" + username: "texera" + password: "texera" +image: + debug: true +primary: + initdb: + scripts: # currently not working, need manual initialization. + init.sql: | + {{ .Files.Get "files/texera_ddl.sql" | indent 4 }} + +webserver: + name: webserver + numOfPods: 1 # Number of pods for the Texera deployment + imageName: shengqun/texera-webserver:dev-usersys # image name of the texera + imagePullPolicy: Always + service: + type: NodePort # for testing purpose, NodePort mode is fine + port: 8080 # port of the pod + nodePort: 30081 # exposed port + volume: + enabled: true + size: 10Gi + mountPath: /core/amber/user-resources # Path inside the container + storageClassName: nfs-client + accessModes: + - ReadWriteMany + +workflowComputingUnitManager: + name: workflow-computing-unit-manager + numOfPods: 1 + serviceAccountName: workflow-computing-unit-manager-service-account + imageName: shengqun/texera-workflow-computing-unit-managing-service:dev + imagePullPolicy: Always + service: + type: NodePort # for testing purpose, NodePort mode is fine + port: 8888 # port of the pod + nodePort: 30082 # exposed port + +workflowCompilingService: + name: workflow-compiling-service + numOfPods: 1 + imageName: shengqun/texera-workflow-compiling-service:dev + imagePullPolicy: Always + service: + type: NodePort + port: 9090 + nodePort: 30083 + +workflowComputingUnit: + name: workflow-computing-unit + numOfPods: 1 + imageName: shengqun/texera-workflow-computing-unit:dev + imagePullPolicy: Always + 
+# Config required for envoy and workflow pods +envoy: + replicas: 1 + image: + repository: envoyproxy/envoy + tag: v1.31-latest + port: + 10000 + debug: false + service: + type: NodePort + port: 10000 + nodePort: 30084 + +# headless service +# each pod's url is: computing-unit-%cuid.workflow-computing-unit.workflow-computing-unit-pool.svc.cluster.local +workflowComputingUnitPool: + name: workflow-computing-unit + service: + port: 8085 + targetPort: 8085 + + +# Ingress dependency configs +ingress-nginx: + controller: + replicaCount: 1 + service: + type: NodePort + nodePorts: + http: 30080 + ingressClassResource: + name: rke2-ingress-nginx + enabled: true + resources: + limits: + cpu: 100m + memory: 128Mi + requests: + cpu: 100m + memory: 128Mi + rbac: + create: true + + +# Custom Ingress resource configs +ingressPaths: + enabled: true + hostname: "staging.texera.io" + paths: + - path: /api/computing-unit + serviceName: workflow-computing-unit-manager-svc + servicePort: 8888 + - path: /api/compile + serviceName: workflow-compiling-service-svc + servicePort: 9090 + - path: /wsapi/workflow-websocket + serviceName: envoy-svc + servicePort: 10000 + - path: /api + serviceName: webserver-svc + servicePort: 8080 + - path: / + serviceName: webserver-svc + servicePort: 8080 + +certManager: + issuer: letsencrypt-prod +tlsSecretName: staging-tls-secret diff --git a/core/scripts/webserver-pv.yaml b/core/scripts/webserver-pv.yaml new file mode 100644 index 00000000000..e3248632098 --- /dev/null +++ b/core/scripts/webserver-pv.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: PersistentVolume +metadata: + name: {{ .Values.webserver.name }}-pv + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.webserver.name }} +spec: + capacity: + storage: {{ .Values.webserver.volume.size }} + accessModes: {{ toYaml .Values.webserver.volume.accessModes | nindent 2 }} + persistentVolumeReclaimPolicy: Delete + storageClassName: {{ 
.Values.webserver.volume.storageClassName }} + volumeMode: Filesystem diff --git a/core/scripts/worker.sh b/core/scripts/worker.sh new file mode 100755 index 00000000000..3148b5ee5de --- /dev/null +++ b/core/scripts/worker.sh @@ -0,0 +1,17 @@ +cd amber +case $# in + 1) + echo "running worker with cluster mode" + sbt "runMain edu.uci.ics.texera.web.TexeraRunWorker --serverAddr $1" + ;; + + 2) + echo "running worker with cluster mode and specific memory allocation = $2 MB" + sbt -mem $2 "runMain edu.uci.ics.texera.web.TexeraRunWorker --serverAddr $1" + ;; + + *) + echo "running worker with local mode" + sbt "runMain edu.uci.ics.texera.web.TexeraRunWorker" + ;; +esac \ No newline at end of file diff --git a/core/scripts/workflow-compiling-service.sh b/core/scripts/workflow-compiling-service.sh index 297035a557e..6e4942ac423 100755 --- a/core/scripts/workflow-compiling-service.sh +++ b/core/scripts/workflow-compiling-service.sh @@ -1 +1,2 @@ +#!/bin/bash target/workflow-compiling-service-0.1.0/bin/workflow-compiling-service \ No newline at end of file diff --git a/core/scripts/workflow-computing-unit-managing-service.sh b/core/scripts/workflow-computing-unit-managing-service.sh new file mode 100755 index 00000000000..100ceabb053 --- /dev/null +++ b/core/scripts/workflow-computing-unit-managing-service.sh @@ -0,0 +1,2 @@ +#!/bin/bash +target/workflow-computing-unit-managing-service-0.1.0/bin/workflow-computing-unit-managing-service \ No newline at end of file diff --git a/core/scripts/workflow-computing-unit.sh b/core/scripts/workflow-computing-unit.sh index f722b933cb4..ab7419a5881 100755 --- a/core/scripts/workflow-computing-unit.sh +++ b/core/scripts/workflow-computing-unit.sh @@ -1,3 +1,4 @@ +#!/bin/bash cd amber if [ ! 
-z $1 ] then diff --git a/core/scripts/workflow-computing-units-deployment.yaml b/core/scripts/workflow-computing-units-deployment.yaml new file mode 100644 index 00000000000..8954fb92758 --- /dev/null +++ b/core/scripts/workflow-computing-units-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} + namespace: {{ .Values.namespace }} + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} +spec: + replicas: {{ .Values.workflowComputingUnit.numOfPods | default 1 }} + selector: + matchLabels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} + template: + metadata: + labels: + app: {{ .Release.Name }}-{{ .Values.workflowComputingUnit.name }} + spec: + containers: + - name: {{ .Values.workflowComputingUnit.name }} + image: {{ .Values.workflowComputingUnit.imageName }} + imagePullPolicy: {{ .Values.workflowComputingUnit.imagePullPolicy }} + ports: + - containerPort: {{ .Values.workflowComputingUnitPool.service.port }} + env: + - name: JDBC_URL + value: "jdbc:mysql://{{ .Release.Name }}-mysql.{{ .Values.namespace }}.svc.cluster.local:3306/{{ .Values.mysql.auth.database }}" + - name: JDBC_USERNAME + value: {{ .Values.mysql.auth.username }} + - name: JDBC_PASSWORD + value: {{ .Values.mysql.auth.password }} +{{/* - name: MONGODB_URL*/}} +{{/* value: "mongodb://{{ .Release.Name }}-mongodb.{{ .Release.Namespace }}.svc.cluster.local:27017/{{ index .Values.mongodb.auth.databases 0 }}"*/}} diff --git a/core/workflow-compiling-service/workflow-compiling-service.dockerfile b/core/workflow-compiling-service/workflow-compiling-service.dockerfile new file mode 100644 index 00000000000..50339888a0c --- /dev/null +++ b/core/workflow-compiling-service/workflow-compiling-service.dockerfile @@ -0,0 +1,17 @@ +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# copy all projects under core to /core +WORKDIR /core +COPY core/ . 
+ +RUN rm -rf amber/user-resources/* + +RUN apt-get update && apt-get install -y unzip + +# build the service +WORKDIR /core +RUN scripts/build-services.sh + +CMD ["scripts/workflow-compiling-service.sh"] + +EXPOSE 9090 \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/README.md b/core/workflow-computing-unit-managing-service/README.md new file mode 100644 index 00000000000..6958f77116d --- /dev/null +++ b/core/workflow-computing-unit-managing-service/README.md @@ -0,0 +1,62 @@ +# Workflow Pod Brain + +## Architecture + +![](./architecture.JPG) + +## GUI Design and User Experience +A power button is added next to the `Run` button. +![image](https://github.com/user-attachments/assets/afce0224-fe3b-4fde-a953-bc3810a86c63) + +By default, no pod is running. When user click on this power button, the button turns into a spinning wheel before the pod is ready. +![image](https://github.com/user-attachments/assets/6c5c04fc-12c0-45f8-ba74-7bd05425242f) +![image](https://github.com/user-attachments/assets/c955288c-f0e1-4984-b6b8-2207ec3b9273) +Now the pod is ready, the button becomes a terminate button, when user click it, the pod will be terminated. After termination, the button becomes power button again. +![image](https://github.com/user-attachments/assets/25d0ea02-628b-47ca-9b9e-7ed27de1ae7b) +![image](https://github.com/user-attachments/assets/f230d545-defe-478d-8f1e-238abe22ac6f) + +### Involved GUI codes + +The component for the workflow's workspace menu is: `core/gui/src/app/workspace/component/menu`, you will need to modify this component to add such button. Please try to make this button a **standalone** component, and let the existing `menu` component to import the power button you wrote. This can keep the code clean. + +For sending request to the backend from the frontend, you may add a new `service` in the gui(e.g. `workflow-computing-unit-managing.service.ts`). 
You can refer to other existing services under `core/gui/src/app/workspace/service` to create a new one just for the workflow pod brain. + +## Structure Overview + +### Configuration Files +Under `src/main/resources/`, there are two configuration files: +- application.conf: define the kubernetes config and the mysql connection config +- config.yaml: define the web application's config + +All configuration items are written by `src/main/scala/config/ApplicationConf.scala`, you can directly use `config` object to access them in the application codes. + +### Application codes + +All application codes are under `src/main/scala` +- WorkflowPodBrainApplication.scala: the main launcher of the workflow pod app +- service/KubernetesClientService.scala(TODO): the encapsulation of Kubernetes pod creation/deletion/query logics +- web/ + - model: contains the sql related connection, and classes generated by jooq to manipulate/query tables in DB + - resources: the RESTful endpoints of the application. `HelloWorldResource` is the hello world endpoint, `WorkflowPodBrainResource`(TODO) are endpoints for workflow-pod related logics + +## How to get started + +### Dependencies +Similar to texera project, the `Java 11`, `sbt` are required + +### Local development setup + +Intellij is highly recommended. You can re-use the IDE setup of the texera project. + +### Launch the App + +To launch the application, build and run `core/workflow-pod-brain/src/main/scala/WorkflowPodBrainApplication.scala`, below is the setup in Intellij: +![](./idea-config.png) + + +## How to collaborate + +1. Checkout from this branch, `workflow-pod` +2. If you have changes ready to review, submit the PR from `your-dev-branch` to `workflow-pod` through Github. The review is done via Github +3. If `workflow-pod` is updated, Jiadong will send the slack message, and you can update your local `workflow-pod` branch and rebase `your-dev-branch` on top of `workflow-pod` +4. 
If any changes need to be made in `workflow-pod`, feel free to DM/email Jiadong(jiadongb@uci.edu) diff --git a/core/workflow-computing-unit-managing-service/architecture.JPG b/core/workflow-computing-unit-managing-service/architecture.JPG new file mode 100644 index 00000000000..ac5b78a78b6 Binary files /dev/null and b/core/workflow-computing-unit-managing-service/architecture.JPG differ diff --git a/core/workflow-computing-unit-managing-service/build.sbt b/core/workflow-computing-unit-managing-service/build.sbt new file mode 100644 index 00000000000..a4924ac3314 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/build.sbt @@ -0,0 +1,33 @@ +import scala.collection.Seq + +name := "workflow-computing-unit-managing-service" +organization := "edu.uci.ics" +version := "0.1.0" + +enablePlugins(JavaAppPackaging) + +ThisBuild / version := "0.1.0-SNAPSHOT" +ThisBuild / scalaVersion := "2.13.12" + +// Dependency Versions +val dropwizardVersion = "4.0.7" + +// Dependencies +libraryDependencies ++= Seq( + "io.dropwizard" % "dropwizard-core" % dropwizardVersion, + "io.kubernetes" % "client-java" % "21.0.0", + "org.jooq" % "jooq" % "3.14.16", + "com.typesafe" % "config" % "1.4.1", + "mysql" % "mysql-connector-java" % "8.0.33", + "com.softwaremill.sttp.client4" %% "core" % "4.0.0-M6", + "com.lihaoyi" %% "upickle" % "3.1.0", + "com.typesafe" % "config" % "1.4.2" +) + +// Compiler Options +Compile / scalacOptions ++= Seq( + "-Xelide-below", "WARNING", + "-feature", + "-deprecation", + "-Ywarn-unused:imports" +) \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/idea-config.png b/core/workflow-computing-unit-managing-service/idea-config.png new file mode 100644 index 00000000000..c507fbc7381 Binary files /dev/null and b/core/workflow-computing-unit-managing-service/idea-config.png differ diff --git a/core/workflow-computing-unit-managing-service/project/build.properties 
b/core/workflow-computing-unit-managing-service/project/build.properties new file mode 100644 index 00000000000..49214c4bb46 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/project/build.properties @@ -0,0 +1 @@ +sbt.version = 1.9.9 diff --git a/core/workflow-computing-unit-managing-service/project/plugins.sbt b/core/workflow-computing-unit-managing-service/project/plugins.sbt new file mode 100644 index 00000000000..ebebbb50109 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.16") \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml b/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml new file mode 100644 index 00000000000..126f1bca597 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/envoy-configmap.yaml @@ -0,0 +1,75 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: envoy-config + namespace: {{ .Values.namespace }} +data: + envoy.yaml: | + static_resources: + listeners: + - name: listener_0 + address: + socket_address: + address: 0.0.0.0 + port_value: 10000 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + stat_prefix: ingress_http + upgrade_configs: + - upgrade_type: websocket + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: ["*"] + routes: + - match: + prefix: "/wsapi/workflow-websocket" + route: + cluster: dynamic_service + prefix_rewrite: "/wsapi/workflow-websocket" + http_filters: + - name: envoy.filters.http.lua + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + inline_code: | + function envoy_on_request(request_handle) + local uri = request_handle:headers():get(":path") + 
local wid = string.match(uri, "wid=(%d+)") + local uid = string.match(uri, "uid=(%d+)") + if uid then + local new_host = "user-pod-" .. uid .. "-" .. wid .. ".workflow-pods.wf-pod-pool.svc.cluster.local:8080" + request_handle:headers():replace(":authority", new_host) + end + end + - name: envoy.filters.http.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.dynamic_forward_proxy.v3.FilterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + + access_log: + - name: envoy.access_loggers.stdout + typed_config: + "@type": type.googleapis.com/envoy.extensions.access_loggers.stream.v3.StdoutAccessLog + + clusters: + - name: dynamic_service + connect_timeout: 0.25s + lb_policy: CLUSTER_PROVIDED + cluster_type: + name: envoy.clusters.dynamic_forward_proxy + typed_config: + "@type": type.googleapis.com/envoy.extensions.clusters.dynamic_forward_proxy.v3.ClusterConfig + dns_cache_config: + name: dynamic_dns_cache + dns_lookup_family: V4_ONLY + dns_refresh_rate: 1s \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml b/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml new file mode 100644 index 00000000000..d38bad35113 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/envoy-deployment.yaml @@ -0,0 +1,32 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: envoy-deployment + namespace: {{ .Values.namespace }} +spec: + replicas: 1 + selector: + matchLabels: + app: envoy + template: + metadata: + labels: + app: envoy + spec: + containers: + - name: envoy + image: envoyproxy/envoy:v1.31-latest # Use the latest stable version + ports: + - containerPort: 10000 # Expose port 10000 for traffic + volumeMounts: + - name: envoy-config + 
mountPath: /etc/envoy + readOnly: true + args: + - "-c" + - "/etc/envoy/envoy.yaml" # Specify the path to the configuration file + # - "--log-level debug" # Set level of logging + volumes: + - name: envoy-config + configMap: + name: envoy-config # Reference the ConfigMap created earlier \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql b/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql new file mode 100644 index 00000000000..fff3a735ac8 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/texera_workflow_pod.sql @@ -0,0 +1,12 @@ +\c texera_db + +CREATE TABLE IF NOT EXISTS pod ( + uid BIGINT NOT NULL, + wid BIGINT NOT NULL, + name VARCHAR(128) NOT NULL, + pod_uid VARCHAR(128) NOT NULL, + creation_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + terminate_time TIMESTAMP DEFAULT NULL, + CONSTRAINT pod_pk PRIMARY KEY (pod_uid), + CONSTRAINT fk_pod_uid FOREIGN KEY (uid) REFERENCES "user"(uid) + ); diff --git a/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml new file mode 100644 index 00000000000..ae27e9524c3 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_namespaces.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: wf-pod-brain +--- +apiVersion: v1 +kind: Namespace +metadata: + name: wf-pod-pool diff --git a/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml new file mode 100644 index 00000000000..2f2df901c99 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/scripts/workflow_pods_service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: workflow-pods + namespace: wf-pod-pool +spec: + clusterIP: None + selector: + 
workflow: worker + ports: + - protocol: TCP + port: 8080 + targetPort: 8080 \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/resources/application.conf b/core/workflow-computing-unit-managing-service/src/main/resources/application.conf new file mode 100644 index 00000000000..3809d83fe45 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/resources/application.conf @@ -0,0 +1,12 @@ +kubernetes { + compute-unit-pool-namespace = "texera-dev" + compute-unit-pool-namespace = ${?KUBERNETES_COMPUTE_UNIT_POOL_NAMESPACE} + + compute-unit-service-name = "workflow-computing-unit-svc" + compute-unit-service-name = ${?KUBERNETES_COMPUTE_UNIT_SERVICE_NAME} + + image-name = "shengqun/texera-workflow-computing-unit:dev" + image-name = ${?KUBERNETES_IMAGE_NAME} + + port-num = 8085 +} \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml b/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml new file mode 100644 index 00000000000..176bcc2f963 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/resources/workflow-computing-unit-managing-service-config.yaml @@ -0,0 +1,13 @@ +server: + applicationConnectors: + - type: http + port: 8888 + + adminConnectors: + - type: http + port: 8082 + +logging: + level: INFO + loggers: + "com.example": DEBUG \ No newline at end of file diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala b/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala new file mode 100644 index 00000000000..e0b8e31d1a0 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/config/WorkflowComputingUnitManagingServiceConf.scala @@ -0,0 +1,15 @@ +package config + 
+import com.typesafe.config.{Config, ConfigFactory} + +object WorkflowComputingUnitManagingServiceConf { + + // Load the configuration + private val conf: Config = ConfigFactory.load() + + // Access the Kubernetes settings with environment variable fallback + val computeUnitServiceName: String = conf.getString("kubernetes.compute-unit-service-name") + val computeUnitPoolNamespace: String = conf.getString("kubernetes.compute-unit-pool-namespace") + val computeUnitImageName: String = conf.getString("kubernetes.image-name") + val computeUnitPortNumber: Int = conf.getInt("kubernetes.port-num") +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala new file mode 100644 index 00000000000..9d8df4b8538 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingService.scala @@ -0,0 +1,48 @@ +package edu.uci.ics.texera.service + +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import edu.uci.ics.amber.core.storage.StorageConfig +import edu.uci.ics.amber.util.PathUtils.workflowComputingUnitManagingServicePath +import edu.uci.ics.texera.dao.SqlServer +import edu.uci.ics.texera.service.resource.WorkflowComputingUnitManagingResource +import io.dropwizard.core.setup.{Bootstrap, Environment} +import io.dropwizard.core.Application + +class WorkflowComputingUnitManagingService + extends Application[WorkflowComputingUnitManagingServiceConfiguration] { + + override def initialize( + bootstrap: Bootstrap[WorkflowComputingUnitManagingServiceConfiguration] + ): Unit = { + // register scala module to dropwizard default object mapper + bootstrap.getObjectMapper.registerModule(DefaultScalaModule) + } + override def run( + configuration: 
WorkflowComputingUnitManagingServiceConfiguration, + environment: Environment + ): Unit = { + SqlServer.initConnection( + StorageConfig.jdbcUrl, + StorageConfig.jdbcUsername, + StorageConfig.jdbcPassword + ) + + // Register http resources + environment.jersey.setUrlPattern("/api/*") + environment.jersey().register(new WorkflowComputingUnitManagingResource) + } +} + +object WorkflowComputingUnitManagingService { + def main(args: Array[String]): Unit = { + val configFilePath = workflowComputingUnitManagingServicePath + .resolve("src") + .resolve("main") + .resolve("resources") + .resolve("workflow-computing-unit-managing-service-config.yaml") + .toAbsolutePath + .toString + + new WorkflowComputingUnitManagingService().run("server", configFilePath) + } +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala new file mode 100644 index 00000000000..fbf431f10f7 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/WorkflowComputingUnitManagingServiceConfiguration.scala @@ -0,0 +1,5 @@ +package edu.uci.ics.texera.service + +import io.dropwizard.core.Configuration + +class WorkflowComputingUnitManagingServiceConfiguration extends Configuration {} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala new file mode 100644 index 00000000000..82bdf0627b8 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/resource/WorkflowComputingUnitManagingResource.scala @@ -0,0 +1,142 @@ +package 
edu.uci.ics.texera.service.resource + +import edu.uci.ics.amber.core.storage.StorageConfig +import edu.uci.ics.texera.dao.SqlServer +import edu.uci.ics.texera.dao.SqlServer.withTransaction +import edu.uci.ics.texera.dao.jooq.generated.tables.WorkflowComputingUnit.WORKFLOW_COMPUTING_UNIT +import edu.uci.ics.texera.dao.jooq.generated.tables.daos.WorkflowComputingUnitDao +import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowComputingUnit +import edu.uci.ics.texera.service.resource.WorkflowComputingUnitManagingResource.{DashboardWorkflowComputingUnit, WorkflowComputingUnitCreationParams, WorkflowComputingUnitTerminationParams, context} +import edu.uci.ics.texera.service.util.KubernetesClientService +import jakarta.ws.rs._ +import jakarta.ws.rs.core.{MediaType, Response} +import org.jooq.DSLContext + +import java.sql.Timestamp + +object WorkflowComputingUnitManagingResource { + + private lazy val context: DSLContext = SqlServer + .getInstance() + .createDSLContext() + + case class WorkflowComputingUnitCreationParams(name: String, unitType: String) + + case class WorkflowComputingUnitTerminationParams(uri: String, name: String) + + case class DashboardWorkflowComputingUnit( + computingUnit: WorkflowComputingUnit, + uri: String, + status: String + ) +} + +@Produces(Array(MediaType.APPLICATION_JSON)) +@Path("/computing-unit") +class WorkflowComputingUnitManagingResource { + + /** + * Create a new pod for the given user ID. + * + * @param param The parameters containing the user ID. + * @return The created pod or an error response. 
+ */ + @POST + @Consumes(Array(MediaType.APPLICATION_JSON)) + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("/create") + def createWorkflowComputingUnit( + param: WorkflowComputingUnitCreationParams + ): DashboardWorkflowComputingUnit = { + try { + withTransaction(context) { ctx => + val wcDao = new WorkflowComputingUnitDao(ctx.configuration()) + + val computingUnit = new WorkflowComputingUnit() + + computingUnit.setUid(0) + computingUnit.setName(param.name) + computingUnit.setCreationTime(new Timestamp(System.currentTimeMillis())) + + // Insert using the DAO + wcDao.insert(computingUnit) + + // Retrieve the generated CUID + val cuid = ctx.lastID().intValue() + val insertedUnit = wcDao.fetchOneByCuid(cuid) + + // Create the pod with the generated CUID + val pod = KubernetesClientService.createPod(cuid) + + // Return the dashboard response + DashboardWorkflowComputingUnit( + insertedUnit, + KubernetesClientService.generatePodURI(cuid).toString, + pod.getStatus.getPhase + ) + } + } + } + + /** + * List all computing units created by the current user. + * + * @return A list of computing units that are not terminated. 
+ */ + @GET + @Consumes(Array(MediaType.APPLICATION_JSON)) + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("") + def listComputingUnits(): java.util.List[DashboardWorkflowComputingUnit] = { + withTransaction(context) { ctx => + val result = ctx + .select() + .from(WORKFLOW_COMPUTING_UNIT) + .where(WORKFLOW_COMPUTING_UNIT.TERMINATE_TIME.isNull) // Filter out terminated units + .fetch() + .map(record => { + val unit = record.into(WORKFLOW_COMPUTING_UNIT).into(classOf[WorkflowComputingUnit]) + val cuid = unit.getCuid.intValue() + val podName = KubernetesClientService.generatePodName(cuid) + val pod = KubernetesClientService.getPodByName(podName) + + DashboardWorkflowComputingUnit( + computingUnit = unit, + uri = KubernetesClientService.generatePodURI(cuid).toString, + status = if (pod != null && pod.getStatus != null) pod.getStatus.getPhase else "Unknown" + ) + }) + + result + } + } + + /** + * Terminate the computing unit's pod based on the pod URI. + * + * @param param The parameters containing the pod URI. + * @return A response indicating success or failure. 
+ */ + @POST + @Consumes(Array(MediaType.APPLICATION_JSON)) + @Produces(Array(MediaType.APPLICATION_JSON)) + @Path("/terminate") + def terminateComputingUnit(param: WorkflowComputingUnitTerminationParams): Response = { + // Attempt to delete the pod using the provided URI + val podURI = param.uri + KubernetesClientService.deletePod(podURI) + + // If successful, update the database + withTransaction(context) { ctx => + val cuDao = new WorkflowComputingUnitDao(ctx.configuration()) + val cuid = KubernetesClientService.parseCUIDFromURI(podURI) + val units = cuDao.fetchByCuid(cuid) + + units.forEach(unit => unit.setTerminateTime(new Timestamp(System.currentTimeMillis()))) + cuDao.update(units) + } + + Response.ok(s"Successfully terminated compute unit with URI $podURI").build() + + } +} diff --git a/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala new file mode 100644 index 00000000000..5f377e459e9 --- /dev/null +++ b/core/workflow-computing-unit-managing-service/src/main/scala/edu/uci/ics/texera/service/util/KubernetesClientService.scala @@ -0,0 +1,271 @@ +package edu.uci.ics.texera.service.util + +import config.WorkflowComputingUnitManagingServiceConf +import config.WorkflowComputingUnitManagingServiceConf.{ + computeUnitImageName, + computeUnitPortNumber, + computeUnitServiceName +} +import edu.uci.ics.amber.core.storage.StorageConfig +import io.kubernetes.client.openapi.apis.CoreV1Api +import io.kubernetes.client.openapi.models._ +import io.kubernetes.client.custom.Quantity +import io.kubernetes.client.openapi.{ApiClient, Configuration} +import io.kubernetes.client.util.Config + +import java.util +import scala.jdk.CollectionConverters.CollectionHasAsScala + +object KubernetesClientService { + + private val podNamePrefix = "computing-unit" + // Create Kubernetes Core and 
Apps clients + private val coreApi: CoreV1Api = { + val client: ApiClient = Config.defaultClient() + Configuration.setDefaultApiClient(client) + new CoreV1Api(client) + } + + private val poolNamespace: String = + WorkflowComputingUnitManagingServiceConf.computeUnitPoolNamespace + + /** + * Generates a URI for the pod based on the computing unit ID (cuid). + * + * @param cuid The computing unit ID. + * @return A URI representing the pod location. + */ + def generatePodURI(cuid: Int): String = { + s"${generatePodName(cuid)}.$computeUnitServiceName.$poolNamespace.svc.cluster.local" + } + + /** + * Generate pod name using the cuid + * + * @param cuid The computing unit ID + * @return The pod name + */ + def generatePodName(cuid: Int): String = s"$podNamePrefix-$cuid" + + /** + * Parses the computing unit ID (cuid) from a given pod URI. + * + * @param uri The pod URI. + * @return The extracted computing unit ID as an integer. + */ + def parseCUIDFromURI(uri: String): Int = { + val pattern = """computing-unit-(\d+).*""".r + uri match { + case pattern(cuid) => cuid.toInt + case _ => throw new IllegalArgumentException(s"Invalid pod URI: $uri") + } + } + + /** + * Retrieves the list of all pods in the specified namespace. + * + * @param namespace The namespace of the pods to be returned. + * @return A list of V1Pod objects. + */ + def getPodsList(namespace: String): List[V1Pod] = { + coreApi.listNamespacedPod(namespace).execute().getItems.asScala.toList + } + + /** + * Retrieves the list of pods for a given label in the specified namespace. + * + * @param namespace The namespace of the pods to be returned. + * @param podLabel The label of the pods to be returned. + * @return A list of V1Pod objects representing the pods with the given label. 
+ */ + def getPodsList(namespace: String, podLabel: String): List[V1Pod] = { + coreApi.listNamespacedPod(namespace).labelSelector(podLabel).execute().getItems.asScala.toList + } + + /** + * Retrieves a single pod with the given label in the specified namespace. + * + * @param namespace The namespace of the pod to be returned. + * @param podLabel The label of the pod to be returned. + * @return A V1Pod object representing the pod with the given label. + */ + def getPodFromLabel(namespace: String, podLabel: String): V1Pod = { + val podsList = getPodsList(namespace, podLabel) + if (podsList.isEmpty) { + null + } else { + podsList.last + } + } + + /** + * Checks if the pod is in the desired status. + * + * @param podName The name of the pod. + * @param desiredState The desired state. + * @return Boolean indicating if the pod is in the desired state. + */ + private def isPodInDesiredState(podName: String, desiredState: String): Boolean = { + val pod = coreApi.readNamespacedPod(podName, poolNamespace).execute() + println(pod.getStatus.getPhase) + pod.getStatus.getPhase == desiredState + } + + def getPodByName(podName: String): V1Pod = { + coreApi.readNamespacedPod(podName, poolNamespace).execute() + } + + /** + * Creates a new pod under the specified namespace for the given computing unit ID. + * + * @param cuid The computing unit ID. + * @return The newly created V1Pod object. 
+ */ + def createPod(cuid: Int): V1Pod = { + val podName = generatePodName(cuid) + if (getPodFromLabel(poolNamespace, s"name=$podName") != null) { + throw new Exception(s"Pod with cuid $cuid already exists") + } + + // Create the PVC + val pvc = new V1PersistentVolumeClaim() + .apiVersion("v1") + .kind("PersistentVolumeClaim") + .metadata( + new V1ObjectMeta() + .name(podName + "-pvc") // Unique PVC name based on the pod name + .namespace(poolNamespace) + ) + .spec( + new V1PersistentVolumeClaimSpec() + .accessModes(util.List.of("ReadWriteOnce")) + .resources( + new V1VolumeResourceRequirements() + .requests( + util.Map.of("storage", new Quantity("2Gi")) + ) + ) + .storageClassName("nfs-client") // NFS StorageClass + ); + coreApi.createNamespacedPersistentVolumeClaim(poolNamespace, pvc).execute() + // Create the Pod + val pod = new V1Pod() + .apiVersion("v1") + .kind("Pod") + .metadata( + new V1ObjectMeta() + .name(podName) + .namespace(poolNamespace) + .labels( + util.Map.of( + "type", + "computing-unit", + "cuid", + String.valueOf(cuid), + "name", + podName + ) + ) + ) + .spec( + new V1PodSpec() + .overhead(null) // https://github.com/kubernetes-client/java/issues/3076 + .containers( + util.List.of( + new V1Container() + .name("computing-unit-master") + .image(computeUnitImageName) + .imagePullPolicy("Always") + .ports(util.List.of(new V1ContainerPort().containerPort(computeUnitPortNumber))) + .env( + util.List.of( + new V1EnvVar().name("JDBC_URL").value(StorageConfig.jdbcUrl), + new V1EnvVar().name("JDBC_USERNAME").value(StorageConfig.jdbcUsername), + new V1EnvVar().name("JDBC_PASSWORD").value(StorageConfig.jdbcPassword) + ) + ) + .volumeMounts( + util.List.of( + // Mount the PVC directly to /core/amber/user-resources + new V1VolumeMount() + .name(podName + "-pvc") + .mountPath("/core/amber/workflow-results"), + new V1VolumeMount() + .name("webserver-pvc") + .mountPath("/core/amber/user-resources") + ) + ) + .resources( + new V1ResourceRequirements() 
+ .limits( + util.Map.of( + "cpu", Quantity.fromString("6000m"), + "memory", Quantity.fromString("16Gi") + ) + ) + .requests( + util.Map.of( + "cpu", Quantity.fromString("6000m"), + "memory", Quantity.fromString("16Gi") + ) + ) + ) + ) + ) + .volumes( + util.List.of( + new V1Volume() + .name(podName + "-pvc") // Use the PVC claim name as the volume name + .persistentVolumeClaim( + new V1PersistentVolumeClaimVolumeSource() + .claimName(podName + "-pvc") // Reference the PVC claim name + ), + new V1Volume() + .name("webserver-pvc") // Use the PVC claim name as the volume name + .persistentVolumeClaim( + new V1PersistentVolumeClaimVolumeSource() + .claimName("webserver-pvc") // Reference the PVC claim name + ) + ) + ) + .hostname(podName) + .subdomain(computeUnitServiceName) + ); + + coreApi.createNamespacedPod(poolNamespace, pod).execute() + } + + /** + * Deletes an existing pod using the pod URI. + * + * @param podURI The URI of the pod to delete. + */ + def deletePod(podURI: String): Unit = { + val cuid = parseCUIDFromURI(podURI) + coreApi.deleteNamespacedPersistentVolumeClaim(generatePodName(cuid) + "-pvc", poolNamespace).execute() + coreApi.deleteNamespacedPod(generatePodName(cuid), poolNamespace).execute() + } + + /** + * Waits for the pod to reach the desired status. + * + * @param cuid The computing unit ID. + * @param desiredStatus The desired pod status. 
+ */ + private def waitForPodStatus(cuid: Int, desiredStatus: String): Unit = { + var attempts = 0 + val maxAttempts = 60 + val podName = generatePodName(cuid) + while (attempts < maxAttempts && !isPodInDesiredState(podName, desiredStatus)) { + attempts += 1 + Thread.sleep(1000) + println(s"Waiting for pod $podName to reach $desiredStatus (attempt $attempts)") + } + + if (!isPodInDesiredState(podName, desiredStatus)) { + throw new RuntimeException( + s"Pod $podName failed to reach $desiredStatus after $maxAttempts attempts" + ) + } + } +} diff --git a/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile b/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile new file mode 100644 index 00000000000..3cae3a68e7f --- /dev/null +++ b/core/workflow-computing-unit-managing-service/workflow-computing-unit-managing-service.dockerfile @@ -0,0 +1,16 @@ +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +# copy all projects under core to /core +WORKDIR /core +COPY core/ . 
+ +RUN rm -rf amber/user-resources/* +RUN apt-get update && apt-get install -y unzip + +# build the service +WORKDIR /core +RUN scripts/build-services.sh + +CMD ["scripts/workflow-computing-unit-managing-service.sh"] + +EXPOSE 8888 \ No newline at end of file diff --git a/core/workflow-core/build.sbt b/core/workflow-core/build.sbt index 4cacebc207a..5b73661dfba 100644 --- a/core/workflow-core/build.sbt +++ b/core/workflow-core/build.sbt @@ -174,4 +174,5 @@ libraryDependencies ++= Seq( "org.yaml" % "snakeyaml" % "1.30", // yaml reader (downgrade to 1.30 due to dropwizard 1.3.23 required by amber) "org.apache.commons" % "commons-vfs2" % "2.9.0", // for FileResolver throw VFS-related exceptions "io.lakefs" % "sdk" % "1.51.0", // for lakeFS api calls + "com.typesafe" % "config" % "1.4.2" ) \ No newline at end of file diff --git a/core/workflow-core/src/main/resources/storage.conf b/core/workflow-core/src/main/resources/storage.conf new file mode 100644 index 00000000000..251bf3c4e9b --- /dev/null +++ b/core/workflow-core/src/main/resources/storage.conf @@ -0,0 +1,26 @@ +storage { + result-storage-mode = memory + result-storage-mode = ${?STORAGE_RESULT_MODE} + + mongodb { + url = "mongodb://localhost:27017" + url = ${?MONGODB_URL} + + database = "texera_storage" + database = ${?MONGODB_DATABASE} + + commit-batch-size = 1000 + commit-batch-size = ${?MONGODB_BATCH_SIZE} + } + + jdbc { + url = "jdbc:mysql://localhost:3306/texera_db?serverTimezone=UTC" + url = ${?JDBC_URL} + + username = "" + username = ${?JDBC_USERNAME} + + password = "" + password = ${?JDBC_PASSWORD} + } +} \ No newline at end of file diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala index b7d34e134b9..8ed071afeb5 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala +++ b/core/workflow-core/src/main/scala/edu/uci/ics/amber/util/PathUtils.scala @@ -37,6 +37,9 @@ 
object PathUtils { lazy val workflowCompilingServicePath: Path = corePath.resolve("workflow-compiling-service") + lazy val workflowComputingUnitManagingServicePath: Path = + corePath.resolve("workflow-computing-unit-managing-service") + lazy val fileServicePath: Path = corePath.resolve("file-service") private lazy val datasetsRootPath = diff --git a/diabetes.csv b/diabetes.csv new file mode 100644 index 00000000000..9e6a36214b9 --- /dev/null +++ b/diabetes.csv @@ -0,0 +1,769 @@ +Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome +6,148,72,35,0,33.6,0.627,50,1 +1,85,66,29,0,26.6,0.351,31,0 +8,183,64,0,0,23.3,0.672,32,1 +1,89,66,23,94,28.1,0.167,21,0 +0,137,40,35,168,43.1,2.288,33,1 +5,116,74,0,0,25.6,0.201,30,0 +3,78,50,32,88,31,0.248,26,1 +10,115,0,0,0,35.3,0.134,29,0 +2,197,70,45,543,30.5,0.158,53,1 +8,125,96,0,0,0,0.232,54,1 +4,110,92,0,0,37.6,0.191,30,0 +10,168,74,0,0,38,0.537,34,1 +10,139,80,0,0,27.1,1.441,57,0 +1,189,60,23,846,30.1,0.398,59,1 +5,166,72,19,175,25.8,0.587,51,1 +7,100,0,0,0,30,0.484,32,1 +0,118,84,47,230,45.8,0.551,31,1 +7,107,74,0,0,29.6,0.254,31,1 +1,103,30,38,83,43.3,0.183,33,0 +1,115,70,30,96,34.6,0.529,32,1 +3,126,88,41,235,39.3,0.704,27,0 +8,99,84,0,0,35.4,0.388,50,0 +7,196,90,0,0,39.8,0.451,41,1 +9,119,80,35,0,29,0.263,29,1 +11,143,94,33,146,36.6,0.254,51,1 +10,125,70,26,115,31.1,0.205,41,1 +7,147,76,0,0,39.4,0.257,43,1 +1,97,66,15,140,23.2,0.487,22,0 +13,145,82,19,110,22.2,0.245,57,0 +5,117,92,0,0,34.1,0.337,38,0 +5,109,75,26,0,36,0.546,60,0 +3,158,76,36,245,31.6,0.851,28,1 +3,88,58,11,54,24.8,0.267,22,0 +6,92,92,0,0,19.9,0.188,28,0 +10,122,78,31,0,27.6,0.512,45,0 +4,103,60,33,192,24,0.966,33,0 +11,138,76,0,0,33.2,0.42,35,0 +9,102,76,37,0,32.9,0.665,46,1 +2,90,68,42,0,38.2,0.503,27,1 +4,111,72,47,207,37.1,1.39,56,1 +3,180,64,25,70,34,0.271,26,0 +7,133,84,0,0,40.2,0.696,37,0 +7,106,92,18,0,22.7,0.235,48,0 +9,171,110,24,240,45.4,0.721,54,1 +7,159,64,0,0,27.4,0.294,40,0 
+0,180,66,39,0,42,1.893,25,1 +1,146,56,0,0,29.7,0.564,29,0 +2,71,70,27,0,28,0.586,22,0 +7,103,66,32,0,39.1,0.344,31,1 +7,105,0,0,0,0,0.305,24,0 +1,103,80,11,82,19.4,0.491,22,0 +1,101,50,15,36,24.2,0.526,26,0 +5,88,66,21,23,24.4,0.342,30,0 +8,176,90,34,300,33.7,0.467,58,1 +7,150,66,42,342,34.7,0.718,42,0 +1,73,50,10,0,23,0.248,21,0 +7,187,68,39,304,37.7,0.254,41,1 +0,100,88,60,110,46.8,0.962,31,0 +0,146,82,0,0,40.5,1.781,44,0 +0,105,64,41,142,41.5,0.173,22,0 +2,84,0,0,0,0,0.304,21,0 +8,133,72,0,0,32.9,0.27,39,1 +5,44,62,0,0,25,0.587,36,0 +2,141,58,34,128,25.4,0.699,24,0 +7,114,66,0,0,32.8,0.258,42,1 +5,99,74,27,0,29,0.203,32,0 +0,109,88,30,0,32.5,0.855,38,1 +2,109,92,0,0,42.7,0.845,54,0 +1,95,66,13,38,19.6,0.334,25,0 +4,146,85,27,100,28.9,0.189,27,0 +2,100,66,20,90,32.9,0.867,28,1 +5,139,64,35,140,28.6,0.411,26,0 +13,126,90,0,0,43.4,0.583,42,1 +4,129,86,20,270,35.1,0.231,23,0 +1,79,75,30,0,32,0.396,22,0 +1,0,48,20,0,24.7,0.14,22,0 +7,62,78,0,0,32.6,0.391,41,0 +5,95,72,33,0,37.7,0.37,27,0 +0,131,0,0,0,43.2,0.27,26,1 +2,112,66,22,0,25,0.307,24,0 +3,113,44,13,0,22.4,0.14,22,0 +2,74,0,0,0,0,0.102,22,0 +7,83,78,26,71,29.3,0.767,36,0 +0,101,65,28,0,24.6,0.237,22,0 +5,137,108,0,0,48.8,0.227,37,1 +2,110,74,29,125,32.4,0.698,27,0 +13,106,72,54,0,36.6,0.178,45,0 +2,100,68,25,71,38.5,0.324,26,0 +15,136,70,32,110,37.1,0.153,43,1 +1,107,68,19,0,26.5,0.165,24,0 +1,80,55,0,0,19.1,0.258,21,0 +4,123,80,15,176,32,0.443,34,0 +7,81,78,40,48,46.7,0.261,42,0 +4,134,72,0,0,23.8,0.277,60,1 +2,142,82,18,64,24.7,0.761,21,0 +6,144,72,27,228,33.9,0.255,40,0 +2,92,62,28,0,31.6,0.13,24,0 +1,71,48,18,76,20.4,0.323,22,0 +6,93,50,30,64,28.7,0.356,23,0 +1,122,90,51,220,49.7,0.325,31,1 +1,163,72,0,0,39,1.222,33,1 +1,151,60,0,0,26.1,0.179,22,0 +0,125,96,0,0,22.5,0.262,21,0 +1,81,72,18,40,26.6,0.283,24,0 +2,85,65,0,0,39.6,0.93,27,0 +1,126,56,29,152,28.7,0.801,21,0 +1,96,122,0,0,22.4,0.207,27,0 +4,144,58,28,140,29.5,0.287,37,0 +3,83,58,31,18,34.3,0.336,25,0 +0,95,85,25,36,37.4,0.247,24,1 
+3,171,72,33,135,33.3,0.199,24,1 +8,155,62,26,495,34,0.543,46,1 +1,89,76,34,37,31.2,0.192,23,0 +4,76,62,0,0,34,0.391,25,0 +7,160,54,32,175,30.5,0.588,39,1 +4,146,92,0,0,31.2,0.539,61,1 +5,124,74,0,0,34,0.22,38,1 +5,78,48,0,0,33.7,0.654,25,0 +4,97,60,23,0,28.2,0.443,22,0 +4,99,76,15,51,23.2,0.223,21,0 +0,162,76,56,100,53.2,0.759,25,1 +6,111,64,39,0,34.2,0.26,24,0 +2,107,74,30,100,33.6,0.404,23,0 +5,132,80,0,0,26.8,0.186,69,0 +0,113,76,0,0,33.3,0.278,23,1 +1,88,30,42,99,55,0.496,26,1 +3,120,70,30,135,42.9,0.452,30,0 +1,118,58,36,94,33.3,0.261,23,0 +1,117,88,24,145,34.5,0.403,40,1 +0,105,84,0,0,27.9,0.741,62,1 +4,173,70,14,168,29.7,0.361,33,1 +9,122,56,0,0,33.3,1.114,33,1 +3,170,64,37,225,34.5,0.356,30,1 +8,84,74,31,0,38.3,0.457,39,0 +2,96,68,13,49,21.1,0.647,26,0 +2,125,60,20,140,33.8,0.088,31,0 +0,100,70,26,50,30.8,0.597,21,0 +0,93,60,25,92,28.7,0.532,22,0 +0,129,80,0,0,31.2,0.703,29,0 +5,105,72,29,325,36.9,0.159,28,0 +3,128,78,0,0,21.1,0.268,55,0 +5,106,82,30,0,39.5,0.286,38,0 +2,108,52,26,63,32.5,0.318,22,0 +10,108,66,0,0,32.4,0.272,42,1 +4,154,62,31,284,32.8,0.237,23,0 +0,102,75,23,0,0,0.572,21,0 +9,57,80,37,0,32.8,0.096,41,0 +2,106,64,35,119,30.5,1.4,34,0 +5,147,78,0,0,33.7,0.218,65,0 +2,90,70,17,0,27.3,0.085,22,0 +1,136,74,50,204,37.4,0.399,24,0 +4,114,65,0,0,21.9,0.432,37,0 +9,156,86,28,155,34.3,1.189,42,1 +1,153,82,42,485,40.6,0.687,23,0 +8,188,78,0,0,47.9,0.137,43,1 +7,152,88,44,0,50,0.337,36,1 +2,99,52,15,94,24.6,0.637,21,0 +1,109,56,21,135,25.2,0.833,23,0 +2,88,74,19,53,29,0.229,22,0 +17,163,72,41,114,40.9,0.817,47,1 +4,151,90,38,0,29.7,0.294,36,0 +7,102,74,40,105,37.2,0.204,45,0 +0,114,80,34,285,44.2,0.167,27,0 +2,100,64,23,0,29.7,0.368,21,0 +0,131,88,0,0,31.6,0.743,32,1 +6,104,74,18,156,29.9,0.722,41,1 +3,148,66,25,0,32.5,0.256,22,0 +4,120,68,0,0,29.6,0.709,34,0 +4,110,66,0,0,31.9,0.471,29,0 +3,111,90,12,78,28.4,0.495,29,0 +6,102,82,0,0,30.8,0.18,36,1 +6,134,70,23,130,35.4,0.542,29,1 +2,87,0,23,0,28.9,0.773,25,0 +1,79,60,42,48,43.5,0.678,23,0 
+2,75,64,24,55,29.7,0.37,33,0 +8,179,72,42,130,32.7,0.719,36,1 +6,85,78,0,0,31.2,0.382,42,0 +0,129,110,46,130,67.1,0.319,26,1 +5,143,78,0,0,45,0.19,47,0 +5,130,82,0,0,39.1,0.956,37,1 +6,87,80,0,0,23.2,0.084,32,0 +0,119,64,18,92,34.9,0.725,23,0 +1,0,74,20,23,27.7,0.299,21,0 +5,73,60,0,0,26.8,0.268,27,0 +4,141,74,0,0,27.6,0.244,40,0 +7,194,68,28,0,35.9,0.745,41,1 +8,181,68,36,495,30.1,0.615,60,1 +1,128,98,41,58,32,1.321,33,1 +8,109,76,39,114,27.9,0.64,31,1 +5,139,80,35,160,31.6,0.361,25,1 +3,111,62,0,0,22.6,0.142,21,0 +9,123,70,44,94,33.1,0.374,40,0 +7,159,66,0,0,30.4,0.383,36,1 +11,135,0,0,0,52.3,0.578,40,1 +8,85,55,20,0,24.4,0.136,42,0 +5,158,84,41,210,39.4,0.395,29,1 +1,105,58,0,0,24.3,0.187,21,0 +3,107,62,13,48,22.9,0.678,23,1 +4,109,64,44,99,34.8,0.905,26,1 +4,148,60,27,318,30.9,0.15,29,1 +0,113,80,16,0,31,0.874,21,0 +1,138,82,0,0,40.1,0.236,28,0 +0,108,68,20,0,27.3,0.787,32,0 +2,99,70,16,44,20.4,0.235,27,0 +6,103,72,32,190,37.7,0.324,55,0 +5,111,72,28,0,23.9,0.407,27,0 +8,196,76,29,280,37.5,0.605,57,1 +5,162,104,0,0,37.7,0.151,52,1 +1,96,64,27,87,33.2,0.289,21,0 +7,184,84,33,0,35.5,0.355,41,1 +2,81,60,22,0,27.7,0.29,25,0 +0,147,85,54,0,42.8,0.375,24,0 +7,179,95,31,0,34.2,0.164,60,0 +0,140,65,26,130,42.6,0.431,24,1 +9,112,82,32,175,34.2,0.26,36,1 +12,151,70,40,271,41.8,0.742,38,1 +5,109,62,41,129,35.8,0.514,25,1 +6,125,68,30,120,30,0.464,32,0 +5,85,74,22,0,29,1.224,32,1 +5,112,66,0,0,37.8,0.261,41,1 +0,177,60,29,478,34.6,1.072,21,1 +2,158,90,0,0,31.6,0.805,66,1 +7,119,0,0,0,25.2,0.209,37,0 +7,142,60,33,190,28.8,0.687,61,0 +1,100,66,15,56,23.6,0.666,26,0 +1,87,78,27,32,34.6,0.101,22,0 +0,101,76,0,0,35.7,0.198,26,0 +3,162,52,38,0,37.2,0.652,24,1 +4,197,70,39,744,36.7,2.329,31,0 +0,117,80,31,53,45.2,0.089,24,0 +4,142,86,0,0,44,0.645,22,1 +6,134,80,37,370,46.2,0.238,46,1 +1,79,80,25,37,25.4,0.583,22,0 +4,122,68,0,0,35,0.394,29,0 +3,74,68,28,45,29.7,0.293,23,0 +4,171,72,0,0,43.6,0.479,26,1 +7,181,84,21,192,35.9,0.586,51,1 +0,179,90,27,0,44.1,0.686,23,1 
+9,164,84,21,0,30.8,0.831,32,1 +0,104,76,0,0,18.4,0.582,27,0 +1,91,64,24,0,29.2,0.192,21,0 +4,91,70,32,88,33.1,0.446,22,0 +3,139,54,0,0,25.6,0.402,22,1 +6,119,50,22,176,27.1,1.318,33,1 +2,146,76,35,194,38.2,0.329,29,0 +9,184,85,15,0,30,1.213,49,1 +10,122,68,0,0,31.2,0.258,41,0 +0,165,90,33,680,52.3,0.427,23,0 +9,124,70,33,402,35.4,0.282,34,0 +1,111,86,19,0,30.1,0.143,23,0 +9,106,52,0,0,31.2,0.38,42,0 +2,129,84,0,0,28,0.284,27,0 +2,90,80,14,55,24.4,0.249,24,0 +0,86,68,32,0,35.8,0.238,25,0 +12,92,62,7,258,27.6,0.926,44,1 +1,113,64,35,0,33.6,0.543,21,1 +3,111,56,39,0,30.1,0.557,30,0 +2,114,68,22,0,28.7,0.092,25,0 +1,193,50,16,375,25.9,0.655,24,0 +11,155,76,28,150,33.3,1.353,51,1 +3,191,68,15,130,30.9,0.299,34,0 +3,141,0,0,0,30,0.761,27,1 +4,95,70,32,0,32.1,0.612,24,0 +3,142,80,15,0,32.4,0.2,63,0 +4,123,62,0,0,32,0.226,35,1 +5,96,74,18,67,33.6,0.997,43,0 +0,138,0,0,0,36.3,0.933,25,1 +2,128,64,42,0,40,1.101,24,0 +0,102,52,0,0,25.1,0.078,21,0 +2,146,0,0,0,27.5,0.24,28,1 +10,101,86,37,0,45.6,1.136,38,1 +2,108,62,32,56,25.2,0.128,21,0 +3,122,78,0,0,23,0.254,40,0 +1,71,78,50,45,33.2,0.422,21,0 +13,106,70,0,0,34.2,0.251,52,0 +2,100,70,52,57,40.5,0.677,25,0 +7,106,60,24,0,26.5,0.296,29,1 +0,104,64,23,116,27.8,0.454,23,0 +5,114,74,0,0,24.9,0.744,57,0 +2,108,62,10,278,25.3,0.881,22,0 +0,146,70,0,0,37.9,0.334,28,1 +10,129,76,28,122,35.9,0.28,39,0 +7,133,88,15,155,32.4,0.262,37,0 +7,161,86,0,0,30.4,0.165,47,1 +2,108,80,0,0,27,0.259,52,1 +7,136,74,26,135,26,0.647,51,0 +5,155,84,44,545,38.7,0.619,34,0 +1,119,86,39,220,45.6,0.808,29,1 +4,96,56,17,49,20.8,0.34,26,0 +5,108,72,43,75,36.1,0.263,33,0 +0,78,88,29,40,36.9,0.434,21,0 +0,107,62,30,74,36.6,0.757,25,1 +2,128,78,37,182,43.3,1.224,31,1 +1,128,48,45,194,40.5,0.613,24,1 +0,161,50,0,0,21.9,0.254,65,0 +6,151,62,31,120,35.5,0.692,28,0 +2,146,70,38,360,28,0.337,29,1 +0,126,84,29,215,30.7,0.52,24,0 +14,100,78,25,184,36.6,0.412,46,1 +8,112,72,0,0,23.6,0.84,58,0 +0,167,0,0,0,32.3,0.839,30,1 +2,144,58,33,135,31.6,0.422,25,1 
+5,77,82,41,42,35.8,0.156,35,0 +5,115,98,0,0,52.9,0.209,28,1 +3,150,76,0,0,21,0.207,37,0 +2,120,76,37,105,39.7,0.215,29,0 +10,161,68,23,132,25.5,0.326,47,1 +0,137,68,14,148,24.8,0.143,21,0 +0,128,68,19,180,30.5,1.391,25,1 +2,124,68,28,205,32.9,0.875,30,1 +6,80,66,30,0,26.2,0.313,41,0 +0,106,70,37,148,39.4,0.605,22,0 +2,155,74,17,96,26.6,0.433,27,1 +3,113,50,10,85,29.5,0.626,25,0 +7,109,80,31,0,35.9,1.127,43,1 +2,112,68,22,94,34.1,0.315,26,0 +3,99,80,11,64,19.3,0.284,30,0 +3,182,74,0,0,30.5,0.345,29,1 +3,115,66,39,140,38.1,0.15,28,0 +6,194,78,0,0,23.5,0.129,59,1 +4,129,60,12,231,27.5,0.527,31,0 +3,112,74,30,0,31.6,0.197,25,1 +0,124,70,20,0,27.4,0.254,36,1 +13,152,90,33,29,26.8,0.731,43,1 +2,112,75,32,0,35.7,0.148,21,0 +1,157,72,21,168,25.6,0.123,24,0 +1,122,64,32,156,35.1,0.692,30,1 +10,179,70,0,0,35.1,0.2,37,0 +2,102,86,36,120,45.5,0.127,23,1 +6,105,70,32,68,30.8,0.122,37,0 +8,118,72,19,0,23.1,1.476,46,0 +2,87,58,16,52,32.7,0.166,25,0 +1,180,0,0,0,43.3,0.282,41,1 +12,106,80,0,0,23.6,0.137,44,0 +1,95,60,18,58,23.9,0.26,22,0 +0,165,76,43,255,47.9,0.259,26,0 +0,117,0,0,0,33.8,0.932,44,0 +5,115,76,0,0,31.2,0.343,44,1 +9,152,78,34,171,34.2,0.893,33,1 +7,178,84,0,0,39.9,0.331,41,1 +1,130,70,13,105,25.9,0.472,22,0 +1,95,74,21,73,25.9,0.673,36,0 +1,0,68,35,0,32,0.389,22,0 +5,122,86,0,0,34.7,0.29,33,0 +8,95,72,0,0,36.8,0.485,57,0 +8,126,88,36,108,38.5,0.349,49,0 +1,139,46,19,83,28.7,0.654,22,0 +3,116,0,0,0,23.5,0.187,23,0 +3,99,62,19,74,21.8,0.279,26,0 +5,0,80,32,0,41,0.346,37,1 +4,92,80,0,0,42.2,0.237,29,0 +4,137,84,0,0,31.2,0.252,30,0 +3,61,82,28,0,34.4,0.243,46,0 +1,90,62,12,43,27.2,0.58,24,0 +3,90,78,0,0,42.7,0.559,21,0 +9,165,88,0,0,30.4,0.302,49,1 +1,125,50,40,167,33.3,0.962,28,1 +13,129,0,30,0,39.9,0.569,44,1 +12,88,74,40,54,35.3,0.378,48,0 +1,196,76,36,249,36.5,0.875,29,1 +5,189,64,33,325,31.2,0.583,29,1 +5,158,70,0,0,29.8,0.207,63,0 +5,103,108,37,0,39.2,0.305,65,0 +4,146,78,0,0,38.5,0.52,67,1 +4,147,74,25,293,34.9,0.385,30,0 +5,99,54,28,83,34,0.499,30,0 
+6,124,72,0,0,27.6,0.368,29,1 +0,101,64,17,0,21,0.252,21,0 +3,81,86,16,66,27.5,0.306,22,0 +1,133,102,28,140,32.8,0.234,45,1 +3,173,82,48,465,38.4,2.137,25,1 +0,118,64,23,89,0,1.731,21,0 +0,84,64,22,66,35.8,0.545,21,0 +2,105,58,40,94,34.9,0.225,25,0 +2,122,52,43,158,36.2,0.816,28,0 +12,140,82,43,325,39.2,0.528,58,1 +0,98,82,15,84,25.2,0.299,22,0 +1,87,60,37,75,37.2,0.509,22,0 +4,156,75,0,0,48.3,0.238,32,1 +0,93,100,39,72,43.4,1.021,35,0 +1,107,72,30,82,30.8,0.821,24,0 +0,105,68,22,0,20,0.236,22,0 +1,109,60,8,182,25.4,0.947,21,0 +1,90,62,18,59,25.1,1.268,25,0 +1,125,70,24,110,24.3,0.221,25,0 +1,119,54,13,50,22.3,0.205,24,0 +5,116,74,29,0,32.3,0.66,35,1 +8,105,100,36,0,43.3,0.239,45,1 +5,144,82,26,285,32,0.452,58,1 +3,100,68,23,81,31.6,0.949,28,0 +1,100,66,29,196,32,0.444,42,0 +5,166,76,0,0,45.7,0.34,27,1 +1,131,64,14,415,23.7,0.389,21,0 +4,116,72,12,87,22.1,0.463,37,0 +4,158,78,0,0,32.9,0.803,31,1 +2,127,58,24,275,27.7,1.6,25,0 +3,96,56,34,115,24.7,0.944,39,0 +0,131,66,40,0,34.3,0.196,22,1 +3,82,70,0,0,21.1,0.389,25,0 +3,193,70,31,0,34.9,0.241,25,1 +4,95,64,0,0,32,0.161,31,1 +6,137,61,0,0,24.2,0.151,55,0 +5,136,84,41,88,35,0.286,35,1 +9,72,78,25,0,31.6,0.28,38,0 +5,168,64,0,0,32.9,0.135,41,1 +2,123,48,32,165,42.1,0.52,26,0 +4,115,72,0,0,28.9,0.376,46,1 +0,101,62,0,0,21.9,0.336,25,0 +8,197,74,0,0,25.9,1.191,39,1 +1,172,68,49,579,42.4,0.702,28,1 +6,102,90,39,0,35.7,0.674,28,0 +1,112,72,30,176,34.4,0.528,25,0 +1,143,84,23,310,42.4,1.076,22,0 +1,143,74,22,61,26.2,0.256,21,0 +0,138,60,35,167,34.6,0.534,21,1 +3,173,84,33,474,35.7,0.258,22,1 +1,97,68,21,0,27.2,1.095,22,0 +4,144,82,32,0,38.5,0.554,37,1 +1,83,68,0,0,18.2,0.624,27,0 +3,129,64,29,115,26.4,0.219,28,1 +1,119,88,41,170,45.3,0.507,26,0 +2,94,68,18,76,26,0.561,21,0 +0,102,64,46,78,40.6,0.496,21,0 +2,115,64,22,0,30.8,0.421,21,0 +8,151,78,32,210,42.9,0.516,36,1 +4,184,78,39,277,37,0.264,31,1 +0,94,0,0,0,0,0.256,25,0 +1,181,64,30,180,34.1,0.328,38,1 +0,135,94,46,145,40.6,0.284,26,0 +1,95,82,25,180,35,0.233,43,1 
+2,99,0,0,0,22.2,0.108,23,0 +3,89,74,16,85,30.4,0.551,38,0 +1,80,74,11,60,30,0.527,22,0 +2,139,75,0,0,25.6,0.167,29,0 +1,90,68,8,0,24.5,1.138,36,0 +0,141,0,0,0,42.4,0.205,29,1 +12,140,85,33,0,37.4,0.244,41,0 +5,147,75,0,0,29.9,0.434,28,0 +1,97,70,15,0,18.2,0.147,21,0 +6,107,88,0,0,36.8,0.727,31,0 +0,189,104,25,0,34.3,0.435,41,1 +2,83,66,23,50,32.2,0.497,22,0 +4,117,64,27,120,33.2,0.23,24,0 +8,108,70,0,0,30.5,0.955,33,1 +4,117,62,12,0,29.7,0.38,30,1 +0,180,78,63,14,59.4,2.42,25,1 +1,100,72,12,70,25.3,0.658,28,0 +0,95,80,45,92,36.5,0.33,26,0 +0,104,64,37,64,33.6,0.51,22,1 +0,120,74,18,63,30.5,0.285,26,0 +1,82,64,13,95,21.2,0.415,23,0 +2,134,70,0,0,28.9,0.542,23,1 +0,91,68,32,210,39.9,0.381,25,0 +2,119,0,0,0,19.6,0.832,72,0 +2,100,54,28,105,37.8,0.498,24,0 +14,175,62,30,0,33.6,0.212,38,1 +1,135,54,0,0,26.7,0.687,62,0 +5,86,68,28,71,30.2,0.364,24,0 +10,148,84,48,237,37.6,1.001,51,1 +9,134,74,33,60,25.9,0.46,81,0 +9,120,72,22,56,20.8,0.733,48,0 +1,71,62,0,0,21.8,0.416,26,0 +8,74,70,40,49,35.3,0.705,39,0 +5,88,78,30,0,27.6,0.258,37,0 +10,115,98,0,0,24,1.022,34,0 +0,124,56,13,105,21.8,0.452,21,0 +0,74,52,10,36,27.8,0.269,22,0 +0,97,64,36,100,36.8,0.6,25,0 +8,120,0,0,0,30,0.183,38,1 +6,154,78,41,140,46.1,0.571,27,0 +1,144,82,40,0,41.3,0.607,28,0 +0,137,70,38,0,33.2,0.17,22,0 +0,119,66,27,0,38.8,0.259,22,0 +7,136,90,0,0,29.9,0.21,50,0 +4,114,64,0,0,28.9,0.126,24,0 +0,137,84,27,0,27.3,0.231,59,0 +2,105,80,45,191,33.7,0.711,29,1 +7,114,76,17,110,23.8,0.466,31,0 +8,126,74,38,75,25.9,0.162,39,0 +4,132,86,31,0,28,0.419,63,0 +3,158,70,30,328,35.5,0.344,35,1 +0,123,88,37,0,35.2,0.197,29,0 +4,85,58,22,49,27.8,0.306,28,0 +0,84,82,31,125,38.2,0.233,23,0 +0,145,0,0,0,44.2,0.63,31,1 +0,135,68,42,250,42.3,0.365,24,1 +1,139,62,41,480,40.7,0.536,21,0 +0,173,78,32,265,46.5,1.159,58,0 +4,99,72,17,0,25.6,0.294,28,0 +8,194,80,0,0,26.1,0.551,67,0 +2,83,65,28,66,36.8,0.629,24,0 +2,89,90,30,0,33.5,0.292,42,0 +4,99,68,38,0,32.8,0.145,33,0 +4,125,70,18,122,28.9,1.144,45,1 +3,80,0,0,0,0,0.174,22,0 
+6,166,74,0,0,26.6,0.304,66,0 +5,110,68,0,0,26,0.292,30,0 +2,81,72,15,76,30.1,0.547,25,0 +7,195,70,33,145,25.1,0.163,55,1 +6,154,74,32,193,29.3,0.839,39,0 +2,117,90,19,71,25.2,0.313,21,0 +3,84,72,32,0,37.2,0.267,28,0 +6,0,68,41,0,39,0.727,41,1 +7,94,64,25,79,33.3,0.738,41,0 +3,96,78,39,0,37.3,0.238,40,0 +10,75,82,0,0,33.3,0.263,38,0 +0,180,90,26,90,36.5,0.314,35,1 +1,130,60,23,170,28.6,0.692,21,0 +2,84,50,23,76,30.4,0.968,21,0 +8,120,78,0,0,25,0.409,64,0 +12,84,72,31,0,29.7,0.297,46,1 +0,139,62,17,210,22.1,0.207,21,0 +9,91,68,0,0,24.2,0.2,58,0 +2,91,62,0,0,27.3,0.525,22,0 +3,99,54,19,86,25.6,0.154,24,0 +3,163,70,18,105,31.6,0.268,28,1 +9,145,88,34,165,30.3,0.771,53,1 +7,125,86,0,0,37.6,0.304,51,0 +13,76,60,0,0,32.8,0.18,41,0 +6,129,90,7,326,19.6,0.582,60,0 +2,68,70,32,66,25,0.187,25,0 +3,124,80,33,130,33.2,0.305,26,0 +6,114,0,0,0,0,0.189,26,0 +9,130,70,0,0,34.2,0.652,45,1 +3,125,58,0,0,31.6,0.151,24,0 +3,87,60,18,0,21.8,0.444,21,0 +1,97,64,19,82,18.2,0.299,21,0 +3,116,74,15,105,26.3,0.107,24,0 +0,117,66,31,188,30.8,0.493,22,0 +0,111,65,0,0,24.6,0.66,31,0 +2,122,60,18,106,29.8,0.717,22,0 +0,107,76,0,0,45.3,0.686,24,0 +1,86,66,52,65,41.3,0.917,29,0 +6,91,0,0,0,29.8,0.501,31,0 +1,77,56,30,56,33.3,1.251,24,0 +4,132,0,0,0,32.9,0.302,23,1 +0,105,90,0,0,29.6,0.197,46,0 +0,57,60,0,0,21.7,0.735,67,0 +0,127,80,37,210,36.3,0.804,23,0 +3,129,92,49,155,36.4,0.968,32,1 +8,100,74,40,215,39.4,0.661,43,1 +3,128,72,25,190,32.4,0.549,27,1 +10,90,85,32,0,34.9,0.825,56,1 +4,84,90,23,56,39.5,0.159,25,0 +1,88,78,29,76,32,0.365,29,0 +8,186,90,35,225,34.5,0.423,37,1 +5,187,76,27,207,43.6,1.034,53,1 +4,131,68,21,166,33.1,0.16,28,0 +1,164,82,43,67,32.8,0.341,50,0 +4,189,110,31,0,28.5,0.68,37,0 +1,116,70,28,0,27.4,0.204,21,0 +3,84,68,30,106,31.9,0.591,25,0 +6,114,88,0,0,27.8,0.247,66,0 +1,88,62,24,44,29.9,0.422,23,0 +1,84,64,23,115,36.9,0.471,28,0 +7,124,70,33,215,25.5,0.161,37,0 +1,97,70,40,0,38.1,0.218,30,0 +8,110,76,0,0,27.8,0.237,58,0 +11,103,68,40,0,46.2,0.126,42,0 
+11,85,74,0,0,30.1,0.3,35,0 +6,125,76,0,0,33.8,0.121,54,1 +0,198,66,32,274,41.3,0.502,28,1 +1,87,68,34,77,37.6,0.401,24,0 +6,99,60,19,54,26.9,0.497,32,0 +0,91,80,0,0,32.4,0.601,27,0 +2,95,54,14,88,26.1,0.748,22,0 +1,99,72,30,18,38.6,0.412,21,0 +6,92,62,32,126,32,0.085,46,0 +4,154,72,29,126,31.3,0.338,37,0 +0,121,66,30,165,34.3,0.203,33,1 +3,78,70,0,0,32.5,0.27,39,0 +2,130,96,0,0,22.6,0.268,21,0 +3,111,58,31,44,29.5,0.43,22,0 +2,98,60,17,120,34.7,0.198,22,0 +1,143,86,30,330,30.1,0.892,23,0 +1,119,44,47,63,35.5,0.28,25,0 +6,108,44,20,130,24,0.813,35,0 +2,118,80,0,0,42.9,0.693,21,1 +10,133,68,0,0,27,0.245,36,0 +2,197,70,99,0,34.7,0.575,62,1 +0,151,90,46,0,42.1,0.371,21,1 +6,109,60,27,0,25,0.206,27,0 +12,121,78,17,0,26.5,0.259,62,0 +8,100,76,0,0,38.7,0.19,42,0 +8,124,76,24,600,28.7,0.687,52,1 +1,93,56,11,0,22.5,0.417,22,0 +8,143,66,0,0,34.9,0.129,41,1 +6,103,66,0,0,24.3,0.249,29,0 +3,176,86,27,156,33.3,1.154,52,1 +0,73,0,0,0,21.1,0.342,25,0 +11,111,84,40,0,46.8,0.925,45,1 +2,112,78,50,140,39.4,0.175,24,0 +3,132,80,0,0,34.4,0.402,44,1 +2,82,52,22,115,28.5,1.699,25,0 +6,123,72,45,230,33.6,0.733,34,0 +0,188,82,14,185,32,0.682,22,1 +0,67,76,0,0,45.3,0.194,46,0 +1,89,24,19,25,27.8,0.559,21,0 +1,173,74,0,0,36.8,0.088,38,1 +1,109,38,18,120,23.1,0.407,26,0 +1,108,88,19,0,27.1,0.4,24,0 +6,96,0,0,0,23.7,0.19,28,0 +1,124,74,36,0,27.8,0.1,30,0 +7,150,78,29,126,35.2,0.692,54,1 +4,183,0,0,0,28.4,0.212,36,1 +1,124,60,32,0,35.8,0.514,21,0 +1,181,78,42,293,40,1.258,22,1 +1,92,62,25,41,19.5,0.482,25,0 +0,152,82,39,272,41.5,0.27,27,0 +1,111,62,13,182,24,0.138,23,0 +3,106,54,21,158,30.9,0.292,24,0 +3,174,58,22,194,32.9,0.593,36,1 +7,168,88,42,321,38.2,0.787,40,1 +6,105,80,28,0,32.5,0.878,26,0 +11,138,74,26,144,36.1,0.557,50,1 +3,106,72,0,0,25.8,0.207,27,0 +6,117,96,0,0,28.7,0.157,30,0 +2,68,62,13,15,20.1,0.257,23,0 +9,112,82,24,0,28.2,1.282,50,1 +0,119,0,0,0,32.4,0.141,24,1 +2,112,86,42,160,38.4,0.246,28,0 +2,92,76,20,0,24.2,1.698,28,0 +6,183,94,0,0,40.8,1.461,45,0 
+0,94,70,27,115,43.5,0.347,21,0 +2,108,64,0,0,30.8,0.158,21,0 +4,90,88,47,54,37.7,0.362,29,0 +0,125,68,0,0,24.7,0.206,21,0 +0,132,78,0,0,32.4,0.393,21,0 +5,128,80,0,0,34.6,0.144,45,0 +4,94,65,22,0,24.7,0.148,21,0 +7,114,64,0,0,27.4,0.732,34,1 +0,102,78,40,90,34.5,0.238,24,0 +2,111,60,0,0,26.2,0.343,23,0 +1,128,82,17,183,27.5,0.115,22,0 +10,92,62,0,0,25.9,0.167,31,0 +13,104,72,0,0,31.2,0.465,38,1 +5,104,74,0,0,28.8,0.153,48,0 +2,94,76,18,66,31.6,0.649,23,0 +7,97,76,32,91,40.9,0.871,32,1 +1,100,74,12,46,19.5,0.149,28,0 +0,102,86,17,105,29.3,0.695,27,0 +4,128,70,0,0,34.3,0.303,24,0 +6,147,80,0,0,29.5,0.178,50,1 +4,90,0,0,0,28,0.61,31,0 +3,103,72,30,152,27.6,0.73,27,0 +2,157,74,35,440,39.4,0.134,30,0 +1,167,74,17,144,23.4,0.447,33,1 +0,179,50,36,159,37.8,0.455,22,1 +11,136,84,35,130,28.3,0.26,42,1 +0,107,60,25,0,26.4,0.133,23,0 +1,91,54,25,100,25.2,0.234,23,0 +1,117,60,23,106,33.8,0.466,27,0 +5,123,74,40,77,34.1,0.269,28,0 +2,120,54,0,0,26.8,0.455,27,0 +1,106,70,28,135,34.2,0.142,22,0 +2,155,52,27,540,38.7,0.24,25,1 +2,101,58,35,90,21.8,0.155,22,0 +1,120,80,48,200,38.9,1.162,41,0 +11,127,106,0,0,39,0.19,51,0 +3,80,82,31,70,34.2,1.292,27,1 +10,162,84,0,0,27.7,0.182,54,0 +1,199,76,43,0,42.9,1.394,22,1 +8,167,106,46,231,37.6,0.165,43,1 +9,145,80,46,130,37.9,0.637,40,1 +6,115,60,39,0,33.7,0.245,40,1 +1,112,80,45,132,34.8,0.217,24,0 +4,145,82,18,0,32.5,0.235,70,1 +10,111,70,27,0,27.5,0.141,40,1 +6,98,58,33,190,34,0.43,43,0 +9,154,78,30,100,30.9,0.164,45,0 +6,165,68,26,168,33.6,0.631,49,0 +1,99,58,10,0,25.4,0.551,21,0 +10,68,106,23,49,35.5,0.285,47,0 +3,123,100,35,240,57.3,0.88,22,0 +8,91,82,0,0,35.6,0.587,68,0 +6,195,70,0,0,30.9,0.328,31,1 +9,156,86,0,0,24.8,0.23,53,1 +0,93,60,0,0,35.3,0.263,25,0 +3,121,52,0,0,36,0.127,25,1 +2,101,58,17,265,24.2,0.614,23,0 +2,56,56,28,45,24.2,0.332,22,0 +0,162,76,36,0,49.6,0.364,26,1 +0,95,64,39,105,44.6,0.366,22,0 +4,125,80,0,0,32.3,0.536,27,1 +5,136,82,0,0,0,0.64,69,0 +2,129,74,26,205,33.2,0.591,25,0 +3,130,64,0,0,23.1,0.314,22,0 
+1,107,50,19,0,28.3,0.181,29,0 +1,140,74,26,180,24.1,0.828,23,0 +1,144,82,46,180,46.1,0.335,46,1 +8,107,80,0,0,24.6,0.856,34,0 +13,158,114,0,0,42.3,0.257,44,1 +2,121,70,32,95,39.1,0.886,23,0 +7,129,68,49,125,38.5,0.439,43,1 +2,90,60,0,0,23.5,0.191,25,0 +7,142,90,24,480,30.4,0.128,43,1 +3,169,74,19,125,29.9,0.268,31,1 +0,99,0,0,0,25,0.253,22,0 +4,127,88,11,155,34.5,0.598,28,0 +4,118,70,0,0,44.5,0.904,26,0 +2,122,76,27,200,35.9,0.483,26,0 +6,125,78,31,0,27.6,0.565,49,1 +1,168,88,29,0,35,0.905,52,1 +2,129,0,0,0,38.5,0.304,41,0 +4,110,76,20,100,28.4,0.118,27,0 +6,80,80,36,0,39.8,0.177,28,0 +10,115,0,0,0,0,0.261,30,1 +2,127,46,21,335,34.4,0.176,22,0 +9,164,78,0,0,32.8,0.148,45,1 +2,93,64,32,160,38,0.674,23,1 +3,158,64,13,387,31.2,0.295,24,0 +5,126,78,27,22,29.6,0.439,40,0 +10,129,62,36,0,41.2,0.441,38,1 +0,134,58,20,291,26.4,0.352,21,0 +3,102,74,0,0,29.5,0.121,32,0 +7,187,50,33,392,33.9,0.826,34,1 +3,173,78,39,185,33.8,0.97,31,1 +10,94,72,18,0,23.1,0.595,56,0 +1,108,60,46,178,35.5,0.415,24,0 +5,97,76,27,0,35.6,0.378,52,1 +4,83,86,19,0,29.3,0.317,34,0 +1,114,66,36,200,38.1,0.289,21,0 +1,149,68,29,127,29.3,0.349,42,1 +5,117,86,30,105,39.1,0.251,42,0 +1,111,94,0,0,32.8,0.265,45,0 +4,112,78,40,0,39.4,0.236,38,0 +1,116,78,29,180,36.1,0.496,25,0 +0,141,84,26,0,32.4,0.433,22,0 +2,175,88,0,0,22.9,0.326,22,0 +2,92,52,0,0,30.1,0.141,22,0 +3,130,78,23,79,28.4,0.323,34,1 +8,120,86,0,0,28.4,0.259,22,1 +2,174,88,37,120,44.5,0.646,24,1 +2,106,56,27,165,29,0.426,22,0 +2,105,75,0,0,23.3,0.56,53,0 +4,95,60,32,0,35.4,0.284,28,0 +0,126,86,27,120,27.4,0.515,21,0 +8,65,72,23,0,32,0.6,42,0 +2,99,60,17,160,36.6,0.453,21,0 +1,102,74,0,0,39.5,0.293,42,1 +11,120,80,37,150,42.3,0.785,48,1 +3,102,44,20,94,30.8,0.4,26,0 +1,109,58,18,116,28.5,0.219,22,0 +9,140,94,0,0,32.7,0.734,45,1 +13,153,88,37,140,40.6,1.174,39,0 +12,100,84,33,105,30,0.488,46,0 +1,147,94,41,0,49.3,0.358,27,1 +1,81,74,41,57,46.3,1.096,32,0 +3,187,70,22,200,36.4,0.408,36,1 +6,162,62,0,0,24.3,0.178,50,1 +4,136,70,0,0,31.2,1.182,22,1 
+1,121,78,39,74,39,0.261,28,0 +3,108,62,24,0,26,0.223,25,0 +0,181,88,44,510,43.3,0.222,26,1 +8,154,78,32,0,32.4,0.443,45,1 +1,128,88,39,110,36.5,1.057,37,1 +7,137,90,41,0,32,0.391,39,0 +0,123,72,0,0,36.3,0.258,52,1 +1,106,76,0,0,37.5,0.197,26,0 +6,190,92,0,0,35.5,0.278,66,1 +2,88,58,26,16,28.4,0.766,22,0 +9,170,74,31,0,44,0.403,43,1 +9,89,62,0,0,22.5,0.142,33,0 +10,101,76,48,180,32.9,0.171,63,0 +2,122,70,27,0,36.8,0.34,27,0 +5,121,72,23,112,26.2,0.245,30,0 +1,126,60,0,0,30.1,0.349,47,1 +1,93,70,31,0,30.4,0.315,23,0 \ No newline at end of file diff --git a/pod-brain.Dockerfile b/pod-brain.Dockerfile new file mode 100644 index 00000000000..9b48c9512ef --- /dev/null +++ b/pod-brain.Dockerfile @@ -0,0 +1,20 @@ +# Use to build workflow-pod-brain image + +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +WORKDIR /core/workflow-pod-brain +COPY core/workflow-pod-brain . +RUN sbt clean package +RUN apt-get update +RUN apt-get install -y netcat unzip + +WORKDIR /core +COPY core/scripts ./scripts +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git + +RUN scripts/build-brain.sh + +CMD ["scripts/deploy-brain.sh"] + +EXPOSE 8888 diff --git a/pod.Dockerfile b/pod.Dockerfile new file mode 100644 index 00000000000..ecc42c73c0f --- /dev/null +++ b/pod.Dockerfile @@ -0,0 +1,25 @@ +# Use to build image of solely Texera's backend + +FROM sbtscala/scala-sbt:eclipse-temurin-jammy-11.0.17_8_1.9.3_2.13.11 + +WORKDIR /core/amber +COPY core/amber . +RUN sbt clean package +RUN apt-get update +RUN apt-get install -y netcat unzip python3-pip +RUN pip3 install python-lsp-server python-lsp-server[websockets] +RUN pip3 install -r requirements.txt +RUN pip3 install -r operator-requirements.txt + +WORKDIR /core +COPY core/scripts ./scripts +# Add .git for runtime calls to jgit from OPversion +COPY .git ../.git + +COPY diabetes.csv ./diabetes.csv + +RUN scripts/build-docker.sh + +CMD ["scripts/deploy-docker-trap.sh"] + +EXPOSE 8080