diff --git a/agni/.env.local.sample b/agni/.env.local.sample new file mode 100644 index 0000000000..90e71fb376 --- /dev/null +++ b/agni/.env.local.sample @@ -0,0 +1,2 @@ +export SEARCH_SERVER=elasticsearch://10.240.0.18:9300 +export PORT=9000 diff --git a/search-service/.gitignore b/agni/.gitignore similarity index 100% rename from search-service/.gitignore rename to agni/.gitignore diff --git a/search-service/Makefile b/agni/Makefile similarity index 61% rename from search-service/Makefile rename to agni/Makefile index 6b917ba8da..d5c6eab75f 100644 --- a/search-service/Makefile +++ b/agni/Makefile @@ -1,14 +1,21 @@ include ../makelib -header = $(call baseheader, $(1), search-service) +header = $(call baseheader, $(1), agni) DOCKER_REPO ?= $(DOCKER_STAGE_REPO) -DOCKER_IMAGE ?= search-service +DOCKER_IMAGE ?= agni DOCKER_TAG ?= master SBT_CMD = sbt -DDOCKER_REPO=$(DOCKER_REPO) -DDOCKER_TAG=${DOCKER_TAG} +autoformat-check: + ../utils/scalafmt/scalafmt.sh --test + +run: + $(call header, Run locally) + sbt '~api/re-start' + clean: $(call header, Cleaning) - ${SBT_CMD} 'clean' + ${SBT_CMD} '; clean' build: fmt-check $(call header, Building) @@ -22,17 +29,17 @@ fmt-check: test: $(call header, Testing) - ${SBT_CMD} 'test' + ${SBT_CMD} '; test' docker: $(call header, Dockerizing) - ${SBT_CMD} 'api/docker' + ${SBT_CMD} '; api/docker' docker-push: $(call header, Registering) - ${SBT_CMD} 'api/dockerPush' + ${SBT_CMD} '; api/dockerPush' -docker-build: - ${SBT_CMD} 'api/dockerBuildAndPush' +docker-all: + ${SBT_CMD} '; api/dockerBuildAndPush' -.PHONY: clean build fmt fmt-check test docker docker-push docker-build +.PHONY: clean build fmt fmt-check test docker docker-push docker-all diff --git a/agni/api/app/foxcomm/agni/api/Api.scala b/agni/api/app/foxcomm/agni/api/Api.scala new file mode 100644 index 0000000000..621f86b0dd --- /dev/null +++ b/agni/api/app/foxcomm/agni/api/Api.scala @@ -0,0 +1,54 @@ +package foxcomm.agni.api + +import com.twitter.finagle.Http +import com.twitter.finagle.http.Status +import com.twitter.util.Await +import foxcomm.agni._ +import foxcomm.agni.dsl.query._ +import foxcomm.agni.interpreter.es.queryInterpreter +import foxcomm.utils.finch._ +import io.circe.generic.extras.auto._ +import io.finch._ +import io.finch.circe._ +import monix.execution.Scheduler +import org.elasticsearch.common.ValidationException + +object Api extends App { + def endpoints(searchService: SearchService)(implicit s: Scheduler) = + post("api" :: "search" :: "translate" :: jsonBody[SearchPayload.fc]) { (searchPayload: SearchPayload.fc) ⇒ + searchService + .translate(searchPayload = searchPayload) + .map(Ok) + .toTwitterFuture + } :+: post( + "api" :: "search" :: string :: string :: param("size") + .as[Int] :: paramOption("from").as[Int] :: jsonBody[SearchPayload]) { + (searchIndex: String, searchType: String, size: Int, from: Option[Int], searchQuery: SearchPayload) ⇒ + searchService + .searchFor(searchIndex = searchIndex, + searchType = searchType, + searchPayload = searchQuery, + searchSize = size, + searchFrom = from) + .map(Ok) + .toTwitterFuture + } :+: get("ping") { + Ok("pong") + } + + def errorHandler[A]: PartialFunction[Throwable, Output[A]] = { + case ex: ValidationException ⇒ Output.failure(ex, Status.BadRequest) + case ex: Exception ⇒ Output.failure(ex, Status.InternalServerError) + case ex ⇒ Output.failure(new RuntimeException(ex), Status.InternalServerError) + } + + implicit val s: Scheduler = Scheduler.global + val config = AppConfig.load() + val svc = SearchService.fromConfig(config, 
queryInterpreter) + + Await.result( + Http.server + .withStreaming(enabled = true) + .serve(s"${config.http.interface}:${config.http.port}", + endpoints(svc).handle(errorHandler).toServiceAs[Application.Json])) +} diff --git a/search-service/build.sbt b/agni/build.sbt similarity index 77% rename from search-service/build.sbt rename to agni/build.sbt index b535ff40b8..d35beaca1a 100644 --- a/search-service/build.sbt +++ b/agni/build.sbt @@ -1,20 +1,15 @@ -import sbtassembly.AssemblyKeys.assemblyExcludedJars import sbtassembly.{MergeStrategy, PathList} -name := "search-service" - -version := "0.1-SNAPSHOT" - lazy val core = (project in file("core")) .settings(Settings.common) .settings( - libraryDependencies ++= Dependencies.core ++ Dependencies.es ++ Dependencies.circe + libraryDependencies ++= Dependencies.core ++ Dependencies.es ++ Dependencies.circe ++ Dependencies.monix ++ Dependencies.test.core ) lazy val finch = (project in file("finch")) .settings(Settings.common) .settings( - libraryDependencies ++= Dependencies.finch ++ Dependencies.circe :+ Dependencies.jwt + libraryDependencies ++= Dependencies.finch ++ Dependencies.circe ++ Dependencies.jwt ++ Dependencies.monix ) lazy val api = (project in file("api")) @@ -24,8 +19,9 @@ lazy val api = (project in file("api")) libraryDependencies ++= Dependencies.finch ) .settings( - mainClass in assembly := Some("foxcomm.search.api.Api"), - assemblyJarName in assembly := "search-service.jar", + Settings.appName := "agni", + mainClass in assembly := Some("foxcomm.agni.api.Api"), + assemblyJarName in assembly := s"${Settings.appName.value}.jar", assemblyMergeStrategy in assembly := { case PathList("BUILD") ⇒ MergeStrategy.discard case PathList("META-INF", "io.netty.versions.properties") ⇒ MergeStrategy.discard diff --git a/agni/core/app/foxcomm/agni/AppConfig.scala b/agni/core/app/foxcomm/agni/AppConfig.scala new file mode 100644 index 0000000000..9ee338a4a6 --- /dev/null +++ b/agni/core/app/foxcomm/agni/AppConfig.scala @@ -0,0 +1,36 @@ +package foxcomm.agni + +import cats.data.NonEmptyList +import com.typesafe.config.ConfigFactory +import java.net.InetSocketAddress +import pureconfig._ +import scala.util.Try + +final case class AppConfig(http: AppConfig.Http, elasticsearch: AppConfig.ElasticSearch) + +@SuppressWarnings(Array("org.wartremover.warts.Equals")) +object AppConfig { + implicit val readHostConfig: ConfigReader[NonEmptyList[InetSocketAddress]] = + ConfigReader.fromNonEmptyStringTry(s ⇒ + Try { + val withoutPrefix = s.stripPrefix("elasticsearch://") + val hosts = withoutPrefix.split(',').map { host ⇒ + val parts = host.split(':') + require(parts.length == 2, + "ElasticSearch uri must be in format elasticsearch://host:port,host:port,...") + new InetSocketAddress(parts(0), parts(1).toInt) + } + require(hosts.length >= 1, "At least single ElasticSearch host should be specified") + NonEmptyList.fromListUnsafe(hosts.toList) + }) + + final case class Http(interface: String, port: Int) + + final case class ElasticSearch(host: NonEmptyList[InetSocketAddress], cluster: String) + + def load(): AppConfig = { + val config = + ConfigFactory.systemProperties.withFallback(ConfigFactory.load()) + loadConfigOrThrow[AppConfig](config, "app") + } +} diff --git a/agni/core/app/foxcomm/agni/SearchService.scala b/agni/core/app/foxcomm/agni/SearchService.scala new file mode 100644 index 0000000000..ad017d659b --- /dev/null +++ b/agni/core/app/foxcomm/agni/SearchService.scala @@ -0,0 +1,108 @@ +package foxcomm.agni + +import cats.implicits._ +import 
foxcomm.agni.interpreter.es._ +import io.circe._ +import io.circe.jawn.parseByteBuffer +import monix.eval.{Coeval, Task} +import org.elasticsearch.action.search.{SearchAction, SearchRequestBuilder, SearchResponse} +import org.elasticsearch.client.Client +import org.elasticsearch.client.transport.TransportClient +import org.elasticsearch.common.settings.Settings +import org.elasticsearch.common.transport.InetSocketTransportAddress +import org.elasticsearch.common.xcontent.{ToXContent, XContentFactory} +import org.elasticsearch.index.query.QueryBuilder +import org.elasticsearch.search.SearchHit + +@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) +class SearchService private (client: Client, qi: ESQueryInterpreter) { + import SearchService.ExtractJsonObject + + def translate(searchPayload: SearchPayload.fc): Task[Json] = { + def buildJson(qb: QueryBuilder): Coeval[Json] = + Coeval.eval { + val builder = XContentFactory.jsonBuilder() + builder.prettyPrint() + builder.startObject() + builder.field("query") + qb.toXContent(builder, ToXContent.EMPTY_PARAMS) + builder.endObject() + parseByteBuffer(builder.bytes().toChannelBuffer.toByteBuffer) + .fold(Coeval.raiseError(_), Coeval.eval(_)) + }.flatten + + for { + builder ← qi(searchPayload.query).task + json ← buildJson(builder).task + } yield json + } + + def searchFor(searchIndex: String, + searchType: String, + searchPayload: SearchPayload, + searchSize: Int, + searchFrom: Option[Int]): Task[SearchResult] = { + def prepareBuilder: Coeval[SearchRequestBuilder] = Coeval.eval { + val builder = new SearchRequestBuilder(client, SearchAction.INSTANCE) + builder + .setIndices(searchIndex) + .setTypes(searchType) + .setSize(searchSize) + searchFrom.foreach(builder.setFrom) + searchPayload.fields.foreach(fs ⇒ builder.setFetchSource(fs.toList.toArray, Array.empty[String])) + builder + } + + def evalQuery(builder: SearchRequestBuilder): Coeval[SearchRequestBuilder] = searchPayload match { + case SearchPayload.es(query, _) ⇒ + Coeval.eval(builder.setQuery(Json.fromJsonObject(query).toBytes)) + case SearchPayload.fc(query, _) ⇒ + qi(query).map(builder.setQuery) + } + + def setupBuilder: Task[SearchRequestBuilder] = (prepareBuilder flatMap evalQuery).task + + for { + builder ← setupBuilder + request = builder.request() + response ← async[SearchResponse, SearchResult](client.search(request, _)) + } yield { + val hits = response.getHits + SearchResult( + result = hits + .hits() + .view + .collect { + case ExtractJsonObject(obj) ⇒ obj + } + .toList, + pagination = SearchPagination(total = hits.totalHits()), + maxScore = hits.getMaxScore + ) + } + } +} + +object SearchService { + object ExtractJsonObject { + def unapply(hit: SearchHit): Option[JsonObject] = + parseByteBuffer(hit.sourceRef.toChannelBuffer.toByteBuffer).toOption + .flatMap(_.asObject) + } + + def apply(client: Client, qi: ESQueryInterpreter): SearchService = + new SearchService(client, qi) + + def fromConfig(config: AppConfig, qi: ESQueryInterpreter): SearchService = { + val esConfig = config.elasticsearch + val settings = + Settings.settingsBuilder().put("cluster.name", esConfig.cluster).build() + val client = TransportClient + .builder() + .settings(settings) + .build() + .addTransportAddresses(esConfig.host.toList.map(new InetSocketTransportAddress(_)): _*) + + apply(client, qi) + } +} diff --git a/agni/core/app/foxcomm/agni/dsl/package.scala b/agni/core/app/foxcomm/agni/dsl/package.scala new file mode 100644 index 0000000000..2aeb24ae85 --- /dev/null +++ 
b/agni/core/app/foxcomm/agni/dsl/package.scala @@ -0,0 +1,7 @@ +package foxcomm.agni + +import io.circe.generic.extras.Configuration + +package object dsl { + implicit def configuration: Configuration = foxcomm.agni.configuration +} diff --git a/agni/core/app/foxcomm/agni/dsl/query.scala b/agni/core/app/foxcomm/agni/dsl/query.scala new file mode 100644 index 0000000000..4bc9dbea96 --- /dev/null +++ b/agni/core/app/foxcomm/agni/dsl/query.scala @@ -0,0 +1,292 @@ +package foxcomm.agni.dsl + +import scala.language.higherKinds +import cats.data.{NonEmptyList, NonEmptyVector} +import cats.implicits._ +import io.circe._ +import io.circe.generic.extras.semiauto._ +import scala.util.Try +import shapeless._ + +object query { + type QueryValueF[F[_], T] = T :+: F[T] :+: CNil + type QueryValue[T] = QueryValueF[NonEmptyList, T] + type CompoundValue = QueryValue[JsonNumber] :+: QueryValue[String] :+: CNil + type Field = QueryValueF[NonEmptyVector, String] + type RangeValue = RangeBound[JsonNumber] :+: RangeBound[String] :+: CNil + + implicit class RichQueryValue[T](val qv: QueryValue[T]) extends AnyVal { + def toNEL: NonEmptyList[T] = qv.eliminate(NonEmptyList.of(_), _.eliminate(identity, _.impossible)) + + def toList: List[T] = toNEL.toList + } + + implicit class RichCompoundValue(val cv: CompoundValue) extends AnyVal { + def toNEL: NonEmptyList[AnyRef] = cv.eliminate(_.toNEL, _.eliminate(_.toNEL, _.impossible)) + + def toList: List[AnyRef] = toNEL.toList + } + + implicit def decodeQueryValueF[F[_], T](implicit fD: Decoder[F[T]], + tD: Decoder[T]): Decoder[QueryValueF[F, T]] = + tD.map(Coproduct[QueryValueF[F, T]](_)) or fD.map(Coproduct[QueryValueF[F, T]](_)) + + implicit val decodeCompoundValue: Decoder[CompoundValue] = + Decoder[QueryValue[JsonNumber]].map(Coproduct[CompoundValue](_)) or + Decoder[QueryValue[String]].map(Coproduct[CompoundValue](_)) + + implicit val decodeField: Decoder[Field] = Decoder.decodeString.map { s ⇒ + val xs = s.split("\\.") + if (xs.length > 1) Coproduct[Field](NonEmptyVector.of(xs.head, xs.tail: _*)) + else Coproduct[Field](s) + } + + implicit val decodeRange: Decoder[RangeValue] = + Decoder[RangeBound[JsonNumber]].map(Coproduct[RangeValue](_)) or + Decoder[RangeBound[String]].map(Coproduct[RangeValue](_)) + + object Boostable { + private[this] val boostableRegex = "^(\\w+)\\^([0-9]*\\.?[0-9]+)$".r + + def unapply(s: String): Option[(String, Float)] = s match { + case boostableRegex(f, b) ⇒ Try(f → b.toFloat).toOption + case _ ⇒ None + } + + def default: Float = 1.0f + } + + sealed trait QueryField { + def toNEL: NonEmptyList[Field] + + def toList: List[Field] = toNEL.toList + } + object QueryField { + final case class Value(field: String, boost: Option[Float]) + + final case class Single(field: Field) extends QueryField { + def toNEL: NonEmptyList[Field] = NonEmptyList.of(field) + } + object Single { + implicit val decodeSingle: Decoder[Single] = Decoder[Field].map(Single(_)) + } + + final case class Multiple(fields: NonEmptyList[Field]) extends QueryField { + def toNEL: NonEmptyList[Field] = fields + } + object Multiple { + implicit val decodeMultiple: Decoder[Multiple] = + Decoder.decodeNonEmptyList[Field].map(Multiple(_)) + } + + implicit val decodeQueryField: Decoder[QueryField] = + Decoder[Single].map(identity) or Decoder[Multiple].map(identity) + } + + sealed trait QueryContext + object QueryContext { + case object filter extends QueryContext + case object must extends QueryContext + case object should extends QueryContext + case object not extends QueryContext + 
+ implicit val decodeQueryContext: Decoder[QueryContext] = deriveEnumerationDecoder[QueryContext] + } + + sealed trait RangeFunction + object RangeFunction { + sealed trait LowerBound extends RangeFunction { + def withBound: Boolean + } + case object Gt extends RangeFunction with LowerBound { + def withBound: Boolean = false + } + case object Gte extends RangeFunction with LowerBound { + def withBound: Boolean = true + } + + sealed trait UpperBound extends RangeFunction { + def withBound: Boolean + } + case object Lt extends RangeFunction with UpperBound { + def withBound: Boolean = false + } + case object Lte extends RangeFunction with UpperBound { + def withBound: Boolean = true + } + + implicit val decodeRangeFunction: KeyDecoder[RangeFunction] = KeyDecoder.instance { + case "lt" | "<" ⇒ Some(Lt) + case "lte" | "<=" ⇒ Some(Lte) + case "gt" | ">" ⇒ Some(Gt) + case "gte" | ">=" ⇒ Some(Gte) + } + } + + final case class RangeBound[T](lower: Option[(RangeFunction.LowerBound, T)], + upper: Option[(RangeFunction.UpperBound, T)]) + object RangeBound { + import RangeFunction._ + + implicit def decodeRangeBound[T: Decoder]: Decoder[RangeBound[T]] = + Decoder.decodeMapLike[Map, RangeFunction, T].emap { map ⇒ + val lbs = map.view.collect { + case (lb: LowerBound, v) ⇒ lb → v + }.toList + val ubs = map.view.collect { + case (ub: UpperBound, v) ⇒ ub → v + }.toList + + if (lbs.size > 1) Either.left("Only single lower bound can be specified") + else if (ubs.size > 1) Either.left("Only single upper bound can be specified") + else Either.right(RangeBound(lbs.headOption, ubs.headOption)) + } + } + + sealed trait QueryFunction + object QueryFunction { + val Discriminator = "type" + + private def buildQueryFunctionDecoder[A <: QueryFunction](expectedTpe: String, decoder: Decoder[A])( + onBoost: (Float, HCursor, Decoder[A]) ⇒ Decoder.Result[A]) = + Decoder.instance { c ⇒ + val tpe = c.downField(Discriminator).focus.flatMap(_.asString) + tpe match { + case Some(Boostable(`expectedTpe`, b)) ⇒ onBoost(b, c, decoder) + case Some(`expectedTpe`) ⇒ decoder(c) + case _ ⇒ Either.left(DecodingFailure("Unknown query function type", c.history)) + } + } + + private def buildBoostableDecoder[A <: QueryFunction](expectedTpe: String)(decoder: Decoder[A]) = + buildQueryFunctionDecoder[A](expectedTpe, decoder)((boost, cursor, decoder) ⇒ + decoder.tryDecode(cursor.withFocus(_.mapObject(_.add("boost", Json.fromFloatOrNull(boost)))))) + + private def buildDecoder[A <: QueryFunction](expectedTpe: String)(decoder: Decoder[A]): Decoder[A] = + buildQueryFunctionDecoder[A](expectedTpe, decoder)((_, cursor, _) ⇒ + Either.left(DecodingFailure(s"$expectedTpe query function is not boostable", cursor.history))) + + sealed trait WithField { this: QueryFunction ⇒ + def field: QueryField + def boost: Option[Float] + } + sealed trait WithContext { this: QueryFunction ⇒ + def ctx: QueryContext + } + + sealed trait TermLevel extends WithContext { this: QueryFunction ⇒ + def context: Option[QueryContext] + + final def ctx: QueryContext = context.getOrElse(QueryContext.filter) + } + sealed trait FullText extends WithContext with WithField { this: QueryFunction ⇒ + def context: Option[QueryContext] + def in: Option[QueryField] + + final def ctx: QueryContext = context.getOrElse(QueryContext.must) + final def field: QueryField = in.getOrElse(QueryField.Single(Coproduct("_all"))) + } + + final case class matches private (in: Option[QueryField], + value: QueryValue[String], + context: Option[QueryContext], + boost: Option[Float]) + extends 
QueryFunction + with FullText + object matches { + implicit val decodeMatches: Decoder[matches] = buildBoostableDecoder("matches")(deriveDecoder[matches]) + } + + final case class equals private (in: QueryField, + value: CompoundValue, + context: Option[QueryContext], + boost: Option[Float]) + extends QueryFunction + with TermLevel + with WithField { + def field: QueryField = in + } + object equals { + implicit val decodeEquals: Decoder[equals] = buildBoostableDecoder("equals")(deriveDecoder[equals]) + } + + final case class exists private (value: QueryField, context: Option[QueryContext]) + extends QueryFunction + with TermLevel + object exists { + implicit val decodeExists: Decoder[exists] = buildDecoder("exists")(deriveDecoder[exists]) + } + + final case class range private (in: QueryField.Single, + value: RangeValue, + context: Option[QueryContext], + boost: Option[Float]) + extends QueryFunction + with TermLevel + with WithField { + def field: QueryField.Single = in + } + object range { + implicit val decodeRange: Decoder[range] = buildBoostableDecoder("range")(deriveDecoder[range]) + } + + final case class raw private (value: JsonObject, context: QueryContext) + extends QueryFunction + with WithContext { + def ctx: QueryContext = context + } + object raw { + implicit val decodeRaw: Decoder[raw] = buildDecoder("raw")(deriveDecoder[raw]) + } + + final case class bool private (value: QueryValue[QueryFunction], context: QueryContext) + extends QueryFunction + with WithContext { + def ctx: QueryContext = context + } + object bool { + // TODO: make it configurable (?) + val MaxDepth = 25 + + implicit val decodeBool: Decoder[bool] = buildDecoder[bool]("bool") { + val decoder = deriveDecoder[bool] + val depthField = "_depth" + + Decoder.instance { c ⇒ + val depth = (for { + parent ← c.up.focus + parent ← parent.asObject + depth ← parent(depthField) + depth ← depth.as[Int].toOption + } yield depth).getOrElse(1) + + // we start counting from 0, + // which denotes implicit top-level bool query + if (depth >= MaxDepth) + Either.left(DecodingFailure(s"Max depth of $MaxDepth exceeded for a bool query", c.history)) + else + decoder.tryDecode(c.withFocus(_.mapObject(_.add(depthField, Json.fromInt(depth + 1))))) + } + } + } + + implicit val decodeQueryFunction: Decoder[QueryFunction] = + Decoder[matches].map(identity[QueryFunction](_)) or + Decoder[equals].map(identity[QueryFunction](_)) or + Decoder[exists].map(identity[QueryFunction](_)) or + Decoder[range].map(identity[QueryFunction](_)) or + Decoder[raw].map(identity[QueryFunction](_)) or + Decoder[bool].map(identity[QueryFunction](_)) + } + + final case class FCQuery(query: Option[NonEmptyList[QueryFunction]]) + object FCQuery { + implicit val decodeFCQuery: Decoder[FCQuery] = { + Decoder + .decodeOption( + Decoder.decodeNonEmptyList[QueryFunction] or + Decoder[QueryFunction].map(NonEmptyList.of(_))) + .map(FCQuery(_)) + } + } +} diff --git a/agni/core/app/foxcomm/agni/interpreter/QueryInterpreter.scala b/agni/core/app/foxcomm/agni/interpreter/QueryInterpreter.scala new file mode 100644 index 0000000000..9af3d8445a --- /dev/null +++ b/agni/core/app/foxcomm/agni/interpreter/QueryInterpreter.scala @@ -0,0 +1,37 @@ +package foxcomm.agni.interpreter + +import scala.language.higherKinds +import cats.data.NonEmptyList +import foxcomm.agni.dsl.query._ + +sealed trait QueryError +object QueryError {} + +trait QueryInterpreter[F[_], V] extends Interpreter[F, NonEmptyList[QueryFunction], V] { + type Result = V + + final def eval(qf: QueryFunction): 
F[Result] = qf match { + case qf: QueryFunction.matches ⇒ matchesF(qf) + case qf: QueryFunction.equals ⇒ equalsF(qf) + case qf: QueryFunction.exists ⇒ existsF(qf) + case qf: QueryFunction.range ⇒ rangeF(qf) + case qf: QueryFunction.raw ⇒ rawF(qf) + case qf: QueryFunction.bool ⇒ boolF(qf) + } + + def matchesF(qf: QueryFunction.matches): F[Result] + + def equalsF(qf: QueryFunction.equals): F[Result] + + def existsF(qf: QueryFunction.exists): F[Result] + + def rangeF(qf: QueryFunction.range): F[Result] + + def rawF(qf: QueryFunction.raw): F[Result] + + def boolF(qf: QueryFunction.bool): F[Result] +} + +object QueryInterpreter { + @inline implicit def apply[F[_], V](implicit qi: QueryInterpreter[F, V]): QueryInterpreter[F, V] = qi +} diff --git a/agni/core/app/foxcomm/agni/interpreter/es/ESQueryInterpreter.scala b/agni/core/app/foxcomm/agni/interpreter/es/ESQueryInterpreter.scala new file mode 100644 index 0000000000..d8f850cc2d --- /dev/null +++ b/agni/core/app/foxcomm/agni/interpreter/es/ESQueryInterpreter.scala @@ -0,0 +1,137 @@ +package foxcomm.agni.interpreter.es + +import cats.Id +import cats.data._ +import cats.implicits._ +import foxcomm.agni._ +import foxcomm.agni.dsl.query._ +import foxcomm.agni.interpreter.QueryInterpreter +import io.circe.JsonObject +import org.elasticsearch.common.xcontent.{ToXContent, XContentBuilder} +import org.elasticsearch.index.query._ +import scala.annotation.tailrec + +@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements", "org.wartremover.warts.TraversableOps")) +private[es] object ESQueryInterpreter + extends QueryInterpreter[Kleisli[Id, ?, BoolQueryBuilder], BoolQueryBuilder] { + type State = Kleisli[Id, BoolQueryBuilder, BoolQueryBuilder] + val State: (BoolQueryBuilder ⇒ Id[BoolQueryBuilder]) ⇒ State = + Kleisli[Id, BoolQueryBuilder, BoolQueryBuilder] + + private def buildNestedQuery(path: NonEmptyVector[String])(q: String ⇒ QueryBuilder): QueryBuilder = { + @tailrec def rec(path: Vector[String], acc: QueryBuilder): QueryBuilder = path match { + case Vector() :+ l ⇒ + QueryBuilders.nestedQuery(l, acc) + case i :+ l ⇒ + rec(i, QueryBuilders.nestedQuery(l, acc)) + } + + val combined = path.tail.iterator.scanLeft(path.head)((p, f) ⇒ s"$p.$f").toVector + rec(combined.init, q(combined.last)) + } + + private implicit class RichBoolQueryBuilder(val b: BoolQueryBuilder) extends AnyVal { + def inContext(qf: QueryFunction.WithContext)(qb: QueryBuilder): BoolQueryBuilder = qf.ctx match { + case QueryContext.filter ⇒ b.filter(qb) + case QueryContext.must ⇒ b.must(qb) + case QueryContext.should ⇒ b.should(qb) + case QueryContext.not ⇒ b.mustNot(qb) + } + } + + private implicit class RichBoostableQueryBuilder[B <: QueryBuilder with BoostableQueryBuilder[B]](val b: B) + extends AnyVal { + def withBoost(qf: QueryFunction.WithField): B = qf.boost.fold(b)(b.boost) + } + + private implicit class RichField(val f: Field) extends AnyVal { + def nest(q: String ⇒ QueryBuilder): QueryBuilder = + f.eliminate( + q, + _.eliminate( + buildNestedQuery(_)(q), + _.impossible + ) + ) + } + + private final case class RawQueryBuilder(content: JsonObject) extends QueryBuilder { + + def doXContent(builder: XContentBuilder, params: ToXContent.Params): Unit = + content.toMap.foreach { + case (n, v) ⇒ + builder.rawField(n, v.toSmile) + } + } + + def apply(qfs: NonEmptyList[QueryFunction]): State = State { b ⇒ + qfs.foldM(b)((b, qf) ⇒ eval(qf)(b): Id[BoolQueryBuilder]) + } + + def matchesF(qf: QueryFunction.matches): State = State { b ⇒ + val inContext = b.inContext(qf) _ + for (v 
← qf.value.toList) { + qf.field match { + case QueryField.Single(n) ⇒ inContext(n.nest(QueryBuilders.matchQuery(_, v).withBoost(qf))) + case QueryField.Multiple(ns) ⇒ + val (sfs, nfs) = ns.foldLeft(Vector.empty[String] → Vector.empty[NonEmptyVector[String]]) { + case ((sAcc, nAcc), f) ⇒ + f.select[String].fold(sAcc)(sAcc :+ _) → + f.select[NonEmptyVector[String]].fold(nAcc)(nAcc :+ _) + } + if (sfs.nonEmpty) inContext(QueryBuilders.multiMatchQuery(v, sfs: _*).withBoost(qf)) + nfs.foreach(nf ⇒ inContext(buildNestedQuery(nf)(QueryBuilders.matchQuery(_, v).withBoost(qf)))) + } + } + b + } + + def equalsF(qf: QueryFunction.equals): State = State { b ⇒ + val inContext = b.inContext(qf) _ + val vs = qf.value.toList + for (f ← qf.in.toList) { + inContext { + vs match { + case v :: Nil ⇒ f.nest(QueryBuilders.termQuery(_, v).withBoost(qf)) + case _ ⇒ f.nest(QueryBuilders.termsQuery(_, vs: _*).withBoost(qf)) + } + } + } + b + } + + def existsF(qf: QueryFunction.exists): State = State { b ⇒ + val inContext = b.inContext(qf) _ + qf.value.toList.foreach(f ⇒ inContext(f.nest(QueryBuilders.existsQuery))) + b + } + + def rangeF(qf: QueryFunction.range): State = State { b ⇒ + b.inContext(qf) { + qf.in.field.nest { f ⇒ + val builder = QueryBuilders.rangeQuery(f).withBoost(qf) + val value = qf.value.unify + value.lower.foreach { + case (RangeFunction.Gt, v) ⇒ builder.gt(v) + case (RangeFunction.Gte, v) ⇒ builder.gte(v) + } + value.upper.foreach { + case (RangeFunction.Lt, v) ⇒ builder.lt(v) + case (RangeFunction.Lte, v) ⇒ builder.lte(v) + } + builder + } + } + b + } + + def rawF(qf: QueryFunction.raw): State = State { b ⇒ + b.inContext(qf)(RawQueryBuilder(qf.value)) + b + } + + def boolF(qf: QueryFunction.bool): State = State { b ⇒ + b.inContext(qf)(apply(qf.value.toNEL)(QueryBuilders.boolQuery())) + b + } +} diff --git a/agni/core/app/foxcomm/agni/interpreter/es/package.scala b/agni/core/app/foxcomm/agni/interpreter/es/package.scala new file mode 100644 index 0000000000..21ce4b006a --- /dev/null +++ b/agni/core/app/foxcomm/agni/interpreter/es/package.scala @@ -0,0 +1,16 @@ +package foxcomm.agni.interpreter + +import cats.data._ +import foxcomm.agni.dsl.query.{FCQuery, QueryFunction} +import monix.eval.Coeval +import org.elasticsearch.index.query.{BoolQueryBuilder, QueryBuilders} + +package object es { + type ESQueryInterpreter = Kleisli[Coeval, FCQuery, BoolQueryBuilder] + + val queryInterpreter: ESQueryInterpreter = { + val eval: Interpreter[Coeval, NonEmptyList[QueryFunction], BoolQueryBuilder] = + ESQueryInterpreter andThen (f ⇒ Coeval.eval(f(QueryBuilders.boolQuery()))) + Kleisli(_.query.fold(Coeval.eval(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery())))(eval)) + } +} diff --git a/agni/core/app/foxcomm/agni/interpreter/package.scala b/agni/core/app/foxcomm/agni/interpreter/package.scala new file mode 100644 index 0000000000..8acaa63617 --- /dev/null +++ b/agni/core/app/foxcomm/agni/interpreter/package.scala @@ -0,0 +1,7 @@ +package foxcomm.agni + +import scala.language.higherKinds + +package object interpreter { + type Interpreter[F[_], A, B] = A ⇒ F[B] +} diff --git a/agni/core/app/foxcomm/agni/package.scala b/agni/core/app/foxcomm/agni/package.scala new file mode 100644 index 0000000000..61c553e078 --- /dev/null +++ b/agni/core/app/foxcomm/agni/package.scala @@ -0,0 +1,47 @@ +package foxcomm + +import com.fasterxml.jackson.core.JsonFactory +import com.fasterxml.jackson.dataformat.smile.SmileFactory +import io.circe.generic.extras.Configuration +import io.circe.{Json, Printer} +import 
java.io.ByteArrayOutputStream +import monix.eval.Task +import org.elasticsearch.action.ActionListener +import scala.concurrent.Promise + +package object agni { + private[this] val smileFactory = new SmileFactory() + private[this] val jsonFactory = new JsonFactory() + + implicit val configuration: Configuration = + Configuration.default.withDefaults.withDiscriminator("type").withSnakeCaseKeys + + @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) + def async[A, B](action: ActionListener[A] ⇒ Any): Task[A] = Task.deferFuture { + val p = Promise[A]() + action(new ActionListener[A] { + def onFailure(e: Throwable): Unit = p.tryFailure(e) + + def onResponse(response: A): Unit = p.trySuccess(response) + }) + p.future + } + + @SuppressWarnings(Array("org.wartremover.warts.While")) + implicit class RichJson(val j: Json) extends AnyVal { + def toBytes: Array[Byte] = Printer.noSpaces.prettyByteBuffer(j).array() + + def toSmile: Array[Byte] = { + val bos = new ByteArrayOutputStream() + val jg = smileFactory.createGenerator(bos) + val jp = jsonFactory.createParser(j.toBytes) + try while (jp.nextToken() ne null) { + jg.copyCurrentEvent(jp) + } finally { + jp.close() + jg.close() + } + bos.toByteArray + } + } +} diff --git a/agni/core/app/foxcomm/agni/payload.scala b/agni/core/app/foxcomm/agni/payload.scala new file mode 100644 index 0000000000..38d0389df6 --- /dev/null +++ b/agni/core/app/foxcomm/agni/payload.scala @@ -0,0 +1,13 @@ +package foxcomm.agni + +import cats.data.NonEmptyList +import foxcomm.agni.dsl.query._ +import io.circe.JsonObject + +sealed trait SearchPayload { + def fields: Option[NonEmptyList[String]] +} +object SearchPayload { + final case class es(query: JsonObject, fields: Option[NonEmptyList[String]]) extends SearchPayload + final case class fc(query: FCQuery, fields: Option[NonEmptyList[String]]) extends SearchPayload +} diff --git a/search-service/core/app/foxcomm/search/response.scala b/agni/core/app/foxcomm/agni/response.scala similarity index 70% rename from search-service/core/app/foxcomm/search/response.scala rename to agni/core/app/foxcomm/agni/response.scala index 345c1d0e5b..f385e6edbc 100644 --- a/search-service/core/app/foxcomm/search/response.scala +++ b/agni/core/app/foxcomm/agni/response.scala @@ -1,7 +1,7 @@ -package foxcomm.search +package foxcomm.agni import io.circe.JsonObject -final case class SearchResult(result: List[JsonObject], pagination: SearchPagination) +final case class SearchResult(result: List[JsonObject], pagination: SearchPagination, maxScore: Float) final case class SearchPagination(total: Long) diff --git a/search-service/core/resources/reference.conf b/agni/core/resources/reference.conf similarity index 87% rename from search-service/core/resources/reference.conf rename to agni/core/resources/reference.conf index daf8c5e83f..c9bebdad37 100644 --- a/search-service/core/resources/reference.conf +++ b/agni/core/resources/reference.conf @@ -7,5 +7,6 @@ app { http { interface = "0.0.0.0" port = 9000 + port = ${?PORT} } } diff --git a/agni/core/test/foxcomm/agni/dsl/QueryDslSpec.scala b/agni/core/test/foxcomm/agni/dsl/QueryDslSpec.scala new file mode 100644 index 0000000000..342807a3fb --- /dev/null +++ b/agni/core/test/foxcomm/agni/dsl/QueryDslSpec.scala @@ -0,0 +1,106 @@ +package foxcomm.agni.dsl + +import cats.data.NonEmptyVector +import foxcomm.agni.dsl.query._ +import io.circe.{Json, JsonObject} +import io.circe.parser._ +import org.scalatest.EitherValues._ +import org.scalatest.OptionValues._ +import org.scalatest.{Assertion, 
FlatSpec, Matchers} +import scala.annotation.tailrec +import scala.io.Source +import shapeless._ +import shapeless.syntax.typeable._ + +class QueryDslSpec extends FlatSpec with Matchers { + implicit class RichRangeBound[A](val rb: RangeBound[A]) { + implicit def toMap: Map[RangeFunction, A] = Map.empty ++ rb.lower ++ rb.upper + } + + def assertQueryFunction[T <: QueryFunction: Typeable](qf: QueryFunction)( + assertion: T ⇒ Assertion): Assertion = + qf.cast[T] + .fold(fail(s"Cannot cast query function ${qf.getClass.getName} to ${Typeable[T].describe}"))(assertion) + + "DSL" should "parse multiple queries" in { + val json = + parse( + Source + .fromInputStream(getClass.getResourceAsStream("/query/multiple.json")) + .mkString).right.value + val queries = + json.as[FCQuery].right.value.query.map(_.toList).getOrElse(Nil) + assertQueryFunction[QueryFunction.equals](queries.head) { equals ⇒ + equals.field.toList should === (List(Coproduct[Field]("slug"))) + equals.ctx should === (QueryContext.must) + equals.context should be('defined) + equals.value.toList should === (List("awesome", "whatever")) + } + assertQueryFunction[QueryFunction.matches](queries(1)) { matches ⇒ + matches.field.toList should === ( + List(Coproduct[Field]("title"), + Coproduct[Field]("description"), + Coproduct[Field](NonEmptyVector.of("skus", "code")))) + matches.ctx should === (QueryContext.should) + matches.boost.value should === (0.5f) + matches.context should be('defined) + matches.value.toList should === (List("food", "drink")) + } + assertQueryFunction[QueryFunction.range](queries(2)) { range ⇒ + range.field.toList should === (List(Coproduct[Field]("price"))) + range.ctx should === (QueryContext.filter) + range.context should be('empty) + range.value.unify.toMap.mapValues(_.toString) should === ( + Map( + RangeFunction.Lt → "5000", + RangeFunction.Gte → "1000" + )) + } + assertQueryFunction[QueryFunction.exists](queries(3)) { exists ⇒ + exists.value.toList should === (List(Coproduct[Field]("archivedAt"))) + exists.ctx should === (QueryContext.not) + exists.context should be('defined) + } + assertQueryFunction[QueryFunction.raw](queries(4)) { raw ⇒ + raw.context should === (QueryContext.filter) + raw.value should === (JsonObject.singleton("match_all", Json.fromJsonObject(JsonObject.empty))) + } + assertQueryFunction[QueryFunction.bool](queries(5)) { bool ⇒ + bool.context should === (QueryContext.should) + val qfs = bool.value.toNEL + assertQueryFunction[QueryFunction.equals](qfs.head) { equals ⇒ + equals.field.toList should === (List(Coproduct[Field]("context"))) + equals.value.toList should === (List("default")) + } + assertQueryFunction[QueryFunction.bool](qfs.tail.head) { bool ⇒ + bool.context should === (QueryContext.not) + assertQueryFunction[QueryFunction.exists](bool.value.toNEL.head) { exists ⇒ + exists.value.toList should === (List(Coproduct[Field]("context"))) + } + } + } + } + + it should "limit max depth for bool query" in { + val leaf = JsonObject.fromMap( + Map( + "type" → Json.fromString("exists"), + "value" → Json.arr(Json.fromString("answer"), Json.fromString("to"), Json.fromString("everything")) + )) + + @tailrec + def deepBool(boolDepth: Int, embed: JsonObject): Json = + if (boolDepth > 0) + deepBool(boolDepth - 1, + JsonObject.fromMap( + Map( + "type" → Json.fromString("bool"), + "context" → Json.fromString("filter"), + "value" → Json.fromJsonObject(embed) + ))) + else Json.fromJsonObject(embed) + + deepBool(boolDepth = QueryFunction.bool.MaxDepth - 1, embed = leaf).as[FCQuery].isLeft should === 
(false) + deepBool(boolDepth = QueryFunction.bool.MaxDepth, embed = leaf).as[FCQuery].isLeft should === (true) + } +} diff --git a/agni/core/test/resources/query/multiple.json b/agni/core/test/resources/query/multiple.json new file mode 100644 index 0000000000..20a4543aa3 --- /dev/null +++ b/agni/core/test/resources/query/multiple.json @@ -0,0 +1,53 @@ +[ + { + "type": "equals", + "context": "must", + "in": "slug", + "value": [ "awesome", "whatever" ] + }, + { + "type": "matches^.5", + "context": "should", + "in": [ "title", "description", "skus.code" ], + "value": [ "food", "drink" ] + }, + { + "type": "range", + "in": "price", + "value": { + "<": 5000, + "gte": 1000 + } + }, + { + "type": "exists", + "context": "not", + "value": "archivedAt" + }, + { + "type": "raw", + "context": "filter", + "value": { + "match_all": {} + } + }, + { + "type": "bool", + "context": "should", + "value": [ + { + "type": "equals", + "in": "context", + "value": "default" + }, + { + "type": "bool", + "context": "not", + "value": { + "type": "exists", + "value": "context" + } + } + ] + } +] diff --git a/search-service/finch/app/foxcomm/utils/finch/Conversions.scala b/agni/finch/app/foxcomm/utils/finch/Conversions.scala similarity index 71% rename from search-service/finch/app/foxcomm/utils/finch/Conversions.scala rename to agni/finch/app/foxcomm/utils/finch/Conversions.scala index 0952292e97..11ca5ebfef 100644 --- a/search-service/finch/app/foxcomm/utils/finch/Conversions.scala +++ b/agni/finch/app/foxcomm/utils/finch/Conversions.scala @@ -5,9 +5,13 @@ import com.twitter.util.{Return, Throw, Future ⇒ TFuture, Promise ⇒ TPromise import scala.concurrent.{ExecutionContext, Future ⇒ SFuture, Promise ⇒ SPromise} import scala.util.{Failure, Success} import Conversions._ +import monix.eval.{Callback, Task} +import monix.execution.Scheduler @SuppressWarnings(Array("org.wartremover.warts.ImplicitConversion")) trait Conversions { + implicit def toRichTask[A](task: Task[A]): RichTask[A] = new RichTask(task) + implicit def toRichSFuture[A](future: SFuture[A]): RichSFuture[A] = new RichSFuture(future) implicit def toRichTFuture[A](future: TFuture[A]): RichTFuture[A] = new RichTFuture(future) @@ -15,6 +19,18 @@ trait Conversions { @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements")) object Conversions { + implicit class RichTask[A](val task: Task[A]) extends AnyVal { + def toTwitterFuture(implicit s: Scheduler): TFuture[A] = { + val result = TPromise[A]() + task.runAsync(new Callback[A] { + def onError(ex: Throwable): Unit = result.setException(ex) + + def onSuccess(value: A): Unit = result.setValue(value) + }) + result + } + } + implicit class RichSFuture[A](val future: SFuture[A]) extends AnyVal { def toTwitterFuture(implicit ec: ExecutionContext): TFuture[A] = { val result = TPromise[A]() diff --git a/search-service/finch/app/foxcomm/utils/finch/JWT.scala b/agni/finch/app/foxcomm/utils/finch/JWT.scala similarity index 100% rename from search-service/finch/app/foxcomm/utils/finch/JWT.scala rename to agni/finch/app/foxcomm/utils/finch/JWT.scala diff --git a/search-service/finch/app/foxcomm/utils/finch/package.scala b/agni/finch/app/foxcomm/utils/finch/package.scala similarity index 100% rename from search-service/finch/app/foxcomm/utils/finch/package.scala rename to agni/finch/app/foxcomm/utils/finch/package.scala diff --git a/agni/project/Dependencies.scala b/agni/project/Dependencies.scala new file mode 100644 index 0000000000..715a1436b4 --- /dev/null +++ b/agni/project/Dependencies.scala @@ -0,0 +1,50 @@ 
+import sbt._
+
+object Dependencies {
+  object versions {
+    val cats = "0.9.0"
+    val circe = "0.8.0"
+    val elasticsearch = "2.1.2"
+    val finch = "0.14.0"
+    val monix = "2.3.0"
+  }
+
+  val core = Seq(
+    "com.github.pureconfig" %% "pureconfig" % "0.7.2",
+    "com.typesafe" % "config" % "1.3.1",
+    "org.typelevel" %% "cats-core" % versions.cats
+  )
+
+  val es = Seq(
+    "com.fasterxml.jackson.dataformat" % "jackson-dataformat-smile" % "2.8.2",
+    "org.elasticsearch" % "elasticsearch" % versions.elasticsearch
+  )
+
+  val circe = Seq(
+    "io.circe" %% "circe-core" % versions.circe,
+    "io.circe" %% "circe-generic-extras" % versions.circe,
+    "io.circe" %% "circe-parser" % versions.circe
+  )
+
+  val finch = Seq(
+    "com.github.finagle" %% "finch-circe" % versions.finch,
+    "com.github.finagle" %% "finch-core" % versions.finch,
+    "com.github.finagle" %% "finch-generic" % versions.finch
+  )
+
+  val jwt = Seq(
+    "com.pauldijou" %% "jwt-core" % "0.12.1"
+  )
+
+  val monix = Seq(
+    "io.monix" %% "monix-cats" % versions.monix,
+    "io.monix" %% "monix-eval" % versions.monix
+  )
+
+  object test {
+    def core =
+      Seq(
+        "org.scalatest" %% "scalatest" % "3.0.3"
+      ).map(_ % "test")
+  }
+}
diff --git a/search-service/project/Settings.scala b/agni/project/Settings.scala
similarity index 78%
rename from search-service/project/Settings.scala
rename to agni/project/Settings.scala
index c2a2d56cb8..488334aaa2 100644
--- a/search-service/project/Settings.scala
+++ b/agni/project/Settings.scala
@@ -9,6 +9,8 @@ import sbtdocker.immutable.Dockerfile
 import wartremover.{wartremoverErrors, Wart, Warts}
 
 object Settings {
+  lazy val appName: TaskKey[String] = taskKey[String]("Name for an application")
+
   def common: Seq[Def.Setting[_]] = Seq(
     scalaVersion := "2.11.11",
     scalacOptions in Compile ++= Seq(
@@ -24,12 +26,15 @@ object Settings {
       "-Xfatal-warnings",
       "-Xfuture"
     ),
+    addCompilerPlugin("org.spire-math" %% "kind-projector" % "0.9.4"),
     wartremoverErrors in (Compile, compile) ++= Warts.allBut(Wart.Any,
                                                              Wart.ImplicitParameter,
                                                              Wart.Nothing,
                                                              Wart.PublicInference),
     scalaSource in Compile := baseDirectory.value / "app",
-    resourceDirectory in Compile := baseDirectory.value / "resources"
+    resourceDirectory in Compile := baseDirectory.value / "resources",
+    scalaSource in Test := baseDirectory.value / "test",
+    resourceDirectory in Test := baseDirectory.value / "test" / "resources"
   )
 
   def deploy: Seq[Def.Setting[_]] = Seq(
@@ -45,11 +50,10 @@ object Settings {
       Dockerfile.empty
        .from("openjdk:8-alpine")
        .add(artifact, artifactTargetPath)
-       .cmdRaw(s"java -jar $artifactTargetPath")
+       .cmdRaw(s"java $$JAVA_OPTS -jar $artifactTargetPath 2>&1 | tee -a /logs/${appName.value}.log")
     },
     imageNames in docker := Seq(
-      ImageName(
-        s"${sys.props("DOCKER_REPO")}/${(assemblyJarName in assembly).value.stripSuffix(".jar")}:${sys.props("DOCKER_TAG")}")
+      ImageName(s"${sys.props("DOCKER_REPO")}/${appName.value}:${sys.props("DOCKER_TAG")}")
     )
   )
 }
diff --git a/search-service/project/assembly.sbt b/agni/project/assembly.sbt
similarity index 100%
rename from search-service/project/assembly.sbt
rename to agni/project/assembly.sbt
diff --git a/search-service/project/build.properties b/agni/project/build.properties
similarity index 100%
rename from search-service/project/build.properties
rename to agni/project/build.properties
diff --git a/search-service/project/docker.sbt b/agni/project/docker.sbt
similarity index 100%
rename from search-service/project/docker.sbt
rename to agni/project/docker.sbt
diff --git a/agni/project/sbt-resolver.sbt b/agni/project/sbt-resolver.sbt
new file mode 100644
index 0000000000..8682fad3db
--- /dev/null
+++ b/agni/project/sbt-resolver.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.spray" % "sbt-revolver" % "0.8.0")
diff --git a/search-service/project/wartremover.sbt b/agni/project/wartremover.sbt
similarity index 100%
rename from search-service/project/wartremover.sbt
rename to agni/project/wartremover.sbt
diff --git a/ashes/src/elastic/activities.js b/ashes/src/elastic/activities.js
index d8243f0030..ecc6b41fb7 100644
--- a/ashes/src/elastic/activities.js
+++ b/ashes/src/elastic/activities.js
@@ -1,5 +1,6 @@
 import _ from 'lodash';
-import { post } from '../lib/search';
+import Agni from 'lib/agni';
+import { post } from 'lib/search';
 import moment from 'moment';
 import * as dsl from './dsl';

@@ -58,7 +59,7 @@ export default function searchActivities(fromActivity = null, trailParams, days

   const q = queryFirstActivity();

-  promise = post('scoped_activity_trails/_search', q)
+  promise = Agni.search('scoped_activity_trails', q)
     .then(response => {
       const result = _.isEmpty(response.result) ? [] : response.result;
       _.set(response, 'result', result);
diff --git a/ashes/src/elastic/customer-groups.js b/ashes/src/elastic/customer-groups.js
index 3366d55a08..6affc1000a 100644
--- a/ashes/src/elastic/customer-groups.js
+++ b/ashes/src/elastic/customer-groups.js
@@ -1,10 +1,10 @@
 /* @flow */

-import { post } from '../lib/search';
+import Agni from 'lib/agni';
 import * as dsl from './dsl';

 const MAX_RESULTS = 1000;
-const searchUrl = `customer_groups_search_view/_search?size=${MAX_RESULTS}`;
+const searchUrl = `customer_groups_search_view?size=${MAX_RESULTS}`;

 export function searchGroups(excludeGroups: Array, token: string) {
   let filters = [];
@@ -31,5 +31,5 @@ export function searchGroups(excludeGroups: Array, token: string) {
     },
   });

-  return post(searchUrl, matchRule);
+  return Agni.search(searchUrl, matchRule);
 }
diff --git a/ashes/src/elastic/customers.js b/ashes/src/elastic/customers.js
index b90a496ecc..d716867b60 100644
--- a/ashes/src/elastic/customers.js
+++ b/ashes/src/elastic/customers.js
@@ -1,11 +1,11 @@
 /* @flow */

 import { toQuery } from './common';
-import { post } from '../lib/search';
+import Agni from 'lib/agni';
 import * as dsl from './dsl';

 const MAX_RESULTS = 1000;
-const mapping = 'customers_search_view/_search';
+const mapping = 'customers_search_view';
 const searchUrl = `${mapping}?size=${MAX_RESULTS}`;

 export function groupCount(criteria: Object, match: string) {
@@ -17,7 +17,7 @@ export function groupSearch(criteria: Object, match: string, forCount: boolean =
   if (forCount) {
     req.size = 0;
   }
-  return post(mapping, req);
+  return Agni.search(mapping, req);
 }

 export function searchCustomers(excludes: Array, token: string) {
@@ -45,5 +45,5 @@ export function searchCustomers(excludes: Array, token: string) {
     },
   });

-  return post(searchUrl, matchRule);
+  return Agni.search(searchUrl, matchRule);
 }
diff --git a/ashes/src/elastic/products.js b/ashes/src/elastic/products.js
index bc9ac1c439..a7e381357a 100644
--- a/ashes/src/elastic/products.js
+++ b/ashes/src/elastic/products.js
@@ -1,13 +1,13 @@
 /* @flow */

-import { post } from '../lib/search';
+import Agni from 'lib/agni';
 import * as dsl from './dsl';
 import moment from 'moment';

 // 1000 should be big enough to request all promotions with applyType = coupon
 // without size parameter ES responds with 10 items max
 const MAX_RESULTS = 1000;
-const productsSearchUrl = `products_search_view/_search?size=${MAX_RESULTS}`;
+const productsSearchUrl =
`products_search_view?size=${MAX_RESULTS}`; type QueryOpts = { omitArchived: ?boolean, @@ -72,5 +72,5 @@ export function searchProducts(token: string, queryOpts: ?QueryOpts): Promise<*> }, }); - return post(productsSearchUrl, matchRule); + return Agni.search(productsSearchUrl, matchRule); } diff --git a/ashes/src/elastic/promotions.js b/ashes/src/elastic/promotions.js index 4ce8e86349..7f44ce7809 100644 --- a/ashes/src/elastic/promotions.js +++ b/ashes/src/elastic/promotions.js @@ -1,12 +1,12 @@ /* @flow */ -import { post } from '../lib/search'; +import Agni from 'lib/agni'; import * as dsl from './dsl'; // 1000 should be big enough to request all promotions with applyType = coupon // without size parameter ES responds with 10 items max const MAX_RESULTS = 1000; -const promotionsSearchUrl: string = `promotions_search_view/_search?size=${MAX_RESULTS}`; +const promotionsSearchUrl: string = `promotions_search_view?size=${MAX_RESULTS}`; export function searchCouponPromotions(token: string): Promise<*> { const filters = []; @@ -33,5 +33,5 @@ export function searchCouponPromotions(token: string): Promise<*> { }, }); - return post(promotionsSearchUrl, matchRule); + return Agni.search(promotionsSearchUrl, matchRule); } diff --git a/ashes/src/elastic/store-admins.js b/ashes/src/elastic/store-admins.js index 896b4a9ab0..cb441258dc 100644 --- a/ashes/src/elastic/store-admins.js +++ b/ashes/src/elastic/store-admins.js @@ -1,7 +1,7 @@ -import { post } from '../lib/search'; +import Agni from 'lib/agni'; import * as dsl from './dsl'; -const adminSearchUrl = 'store_admins_search_view/_search'; +const adminSearchUrl = 'store_admins_search_view'; export function searchAdmins(token) { const caseInsesnitiveToken = token.toLowerCase(); @@ -14,5 +14,5 @@ export function searchAdmins(token) { } }); - return post(adminSearchUrl, matchRule); + return Agni.search(adminSearchUrl, matchRule); } diff --git a/ashes/src/elastic/taxonomy.js b/ashes/src/elastic/taxonomy.js index b35cc6ea67..e256d86416 100644 --- a/ashes/src/elastic/taxonomy.js +++ b/ashes/src/elastic/taxonomy.js @@ -1,10 +1,10 @@ /* @flow */ -import { post } from '../lib/search'; +import Agni from 'lib/agni'; import * as dsl from './dsl'; const MAX_RESULTS = 1000; -const taxonomiesSearchUrl = `taxonomies_search_view/_search?size=${MAX_RESULTS}`; +const taxonomiesSearchUrl = `taxonomies_search_view?size=${MAX_RESULTS}`; export function searchTaxonomies(token: string): Promise<*> { const filters = []; @@ -26,6 +26,5 @@ export function searchTaxonomies(token: string): Promise<*> { }, }); - return post(taxonomiesSearchUrl, matchRule); + return Agni.search(taxonomiesSearchUrl, matchRule); } - diff --git a/ashes/src/lib/agni.js b/ashes/src/lib/agni.js new file mode 100644 index 0000000000..5d6203274b --- /dev/null +++ b/ashes/src/lib/agni.js @@ -0,0 +1,25 @@ +/* @flow */ + +import { request as baseRequest } from './api'; + +type TArgs = [string, Object]; + +class Agni { + searchURI(uri: string): string { + return `/api/advanced-search/admin/${uri}`; + } + + request(method: string, uri: string, data: Object): Promise<*> { + const payload = { + type: 'es', + query: {...data}, + }; + return baseRequest(method, this.searchURI(uri), payload); + } + + search(...args: TArgs): Promise<*> { + return this.request('POST', ...args); + } +} + +export default new Agni(); diff --git a/ashes/src/lib/search.js b/ashes/src/lib/search.js index 2c796059c0..207c5e3476 100644 --- a/ashes/src/lib/search.js +++ b/ashes/src/lib/search.js @@ -1,18 +1,21 @@ +/* @flow */ import { request 
as baseRequest } from './api'; -function searchURI(uri) { +type TArgs = [string, Object]; + +function searchURI(uri: string): string { return `/api/search/admin/${uri}`; } -function request(method, uri, data) { +function request(method: string, uri: string, data: Object): Promise<*> { return baseRequest(method, searchURI(uri), data); } -export function get(...args) { +export function get(...args: TArgs): Promise<*> { return request('GET', ...args); } -export function post(...args) { +export function post(...args: TArgs): Promise<*> { return request('POST', ...args); } diff --git a/ashes/src/modules/carts/list.js b/ashes/src/modules/carts/list.js index 6206d4a7ad..55204eb14d 100644 --- a/ashes/src/modules/carts/list.js +++ b/ashes/src/modules/carts/list.js @@ -6,7 +6,7 @@ import { addNativeFilters, addShouldFilters } from '../../elastic/common'; const { reducer, actions } = makeLiveSearch( 'carts.list', searchTerms, - 'carts_search_view/_search', + 'carts_search_view', 'cartsScope', { processQuery: (query) => { diff --git a/ashes/src/modules/carts/payment-methods.js b/ashes/src/modules/carts/payment-methods.js index 00a4eebed7..34268a1c18 100644 --- a/ashes/src/modules/carts/payment-methods.js +++ b/ashes/src/modules/carts/payment-methods.js @@ -2,7 +2,7 @@ import _ from 'lodash'; import Api from 'lib/api'; import stripe from 'lib/stripe'; import { createAction, createReducer } from 'redux-act'; -import { post } from 'lib/search'; +import Agni from 'lib/agni'; import { getBillingAddress } from 'lib/credit-card-utils'; import { toQuery } from '../../elastic/common'; import { createAsyncActions } from '@foxcomm/wings'; @@ -75,7 +75,7 @@ const _giftCardSearch = createAsyncActions( }, }]; - return post('gift_cards_search_view/_search', toQuery(filters)); + return Agni.search('gift_cards_search_view', toQuery(filters)); } ); diff --git a/ashes/src/modules/catalog/list.js b/ashes/src/modules/catalog/list.js index 44703a9e9b..7540594e5b 100644 --- a/ashes/src/modules/catalog/list.js +++ b/ashes/src/modules/catalog/list.js @@ -6,7 +6,7 @@ import searchTerms from './search-terms'; const { reducer, actions } = makeLiveSearch( 'catalogs.list', searchTerms, - 'catalogs_search_view/_search', + 'catalogs_search_view', 'catalogsScope', { rawSorts: ['name'], diff --git a/ashes/src/modules/coupons/coupon-codes.js b/ashes/src/modules/coupons/coupon-codes.js index e244da9840..e87c251bb4 100644 --- a/ashes/src/modules/coupons/coupon-codes.js +++ b/ashes/src/modules/coupons/coupon-codes.js @@ -1,4 +1,3 @@ - import makeLiveSearch from '../live-search'; const searchTerms = [ @@ -27,7 +26,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'coupons.couponCodes', searchTerms, - 'coupon_codes_search_view/_search', + 'coupon_codes_search_view', 'couponCodesScope', { skipInitialFetch: true }, diff --git a/ashes/src/modules/coupons/list.js b/ashes/src/modules/coupons/list.js index c0003a6dc1..5ee96e6f4b 100644 --- a/ashes/src/modules/coupons/list.js +++ b/ashes/src/modules/coupons/list.js @@ -1,4 +1,3 @@ - import makeLiveSearch from '../live-search'; const searchTerms = [ @@ -26,7 +25,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'coupons.list', searchTerms, - 'coupons_search_view/_search', + 'coupons_search_view', 'couponsScope', { initialState: { sortBy: '-createdAt' } diff --git a/ashes/src/modules/customer-groups/details/customers-list.js b/ashes/src/modules/customer-groups/details/customers-list.js index a20268308e..86d489aeea 100644 --- 
a/ashes/src/modules/customer-groups/details/customers-list.js +++ b/ashes/src/modules/customer-groups/details/customers-list.js @@ -21,7 +21,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'customerGroups.details.customers', searchTerms, - 'customers_search_view/_search', + 'customers_search_view', 'customersScope', { skipInitialFetch: true, diff --git a/ashes/src/modules/customer-groups/list.js b/ashes/src/modules/customer-groups/list.js index 679b48e920..3ed230d0d5 100644 --- a/ashes/src/modules/customer-groups/list.js +++ b/ashes/src/modules/customer-groups/list.js @@ -5,7 +5,7 @@ import makeLiveSearch from '../live-search'; const { reducer, actions } = makeLiveSearch( 'customerGroups.list', [], - 'customer_groups_search_view/_search', + 'customer_groups_search_view', 'customerGroupsScope', { processQuery: (query) => addNativeFilters(query,[dsl.existsFilter('deletedAt', 'missing')]), diff --git a/ashes/src/modules/customers/items.js b/ashes/src/modules/customers/items.js index d72ff02aee..8c8dd5ff9e 100644 --- a/ashes/src/modules/customers/items.js +++ b/ashes/src/modules/customers/items.js @@ -41,7 +41,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'customers.items', searchTerms, - 'customer_items_view/_search', + 'customer_items_view', 'customerItemsScope', { skipInitialFetch: true, diff --git a/ashes/src/modules/customers/list.js b/ashes/src/modules/customers/list.js index acb5314b9a..b46176f8d4 100644 --- a/ashes/src/modules/customers/list.js +++ b/ashes/src/modules/customers/list.js @@ -6,7 +6,7 @@ import * as dsl from 'elastic/dsl'; const { reducer, actions } = makeLiveSearch( 'customers.list', searchTerms, - 'customers_search_view/_search', + 'customers_search_view', 'customersScope', { initialState: { sortBy: '-joinedAt' }, diff --git a/ashes/src/modules/customers/store-credit-transactions/transactions.js b/ashes/src/modules/customers/store-credit-transactions/transactions.js index 53b2af3af8..01c246e174 100644 --- a/ashes/src/modules/customers/store-credit-transactions/transactions.js +++ b/ashes/src/modules/customers/store-credit-transactions/transactions.js @@ -1,4 +1,3 @@ - import makeLiveSearch from 'modules/live-search'; const searchTerms = [ @@ -81,7 +80,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'customers.storeCreditTransactions.list', searchTerms, - 'store_credit_transactions_search_view/_search', + 'store_credit_transactions_search_view', 'customerStoreCreditTransactionsScope', { initialState: { sortBy: '-createdAt' }, skipInitialFetch: true diff --git a/ashes/src/modules/customers/store-credits.js b/ashes/src/modules/customers/store-credits.js index e7f9e9770e..a715f89d7c 100644 --- a/ashes/src/modules/customers/store-credits.js +++ b/ashes/src/modules/customers/store-credits.js @@ -1,4 +1,3 @@ - import makeLiveSearch from '../live-search'; const searchTerms = [ @@ -65,7 +64,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'customers.storeCredits', searchTerms, - 'store_credits_search_view/_search', + 'store_credits_search_view', 'customerStoreCreditsScope', { skipInitialFetch: true } diff --git a/ashes/src/modules/customers/transactions/transactions.js b/ashes/src/modules/customers/transactions/transactions.js index da25882331..9fbe923e99 100644 --- a/ashes/src/modules/customers/transactions/transactions.js +++ b/ashes/src/modules/customers/transactions/transactions.js @@ -100,7 +100,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 
'customers.transactions.list', searchTerms, - 'orders_search_view/_search', + 'orders_search_view', 'customerTransactionsScope', { initialState: { sortBy: '-placedAt' }, skipInitialFetch: true diff --git a/ashes/src/modules/gift-cards/list.js b/ashes/src/modules/gift-cards/list.js index f56bcb9f73..849d8d647b 100644 --- a/ashes/src/modules/gift-cards/list.js +++ b/ashes/src/modules/gift-cards/list.js @@ -7,7 +7,7 @@ import searchTerms from './search-terms'; const { reducer, actions } = makeLiveSearch( 'giftCards.list', searchTerms, - 'gift_cards_search_view/_search', + 'gift_cards_search_view', 'giftCardsScope', { initialState: { sortBy: '-createdAt' } diff --git a/ashes/src/modules/gift-cards/new.js b/ashes/src/modules/gift-cards/new.js index 92e3ede4d5..8c93fc89d7 100644 --- a/ashes/src/modules/gift-cards/new.js +++ b/ashes/src/modules/gift-cards/new.js @@ -1,15 +1,13 @@ - /* @flow weak */ // state for gift card adding form import _ from 'lodash'; -import Api from '../../lib/api'; +import Api from 'lib/api'; import { combineReducers } from 'redux'; import { createAction, createReducer } from 'redux-act'; import { assoc } from 'sprout-data'; - const _createAction = (desc, ...args) => createAction(`GIFT_CARDS_NEW_${desc}`, ...args); export const changeFormData = _createAction('CHANGE_FORM', (name, value) => ({name, value})); @@ -26,7 +24,7 @@ const emptyFilters = []; const emptyPhrase = ''; const quickSearch = makeQuickSearch( 'giftCards.adding.suggestedCustomers', - 'customers_search_view/_search', + 'customers_search_view', emptyFilters, emptyPhrase ); diff --git a/ashes/src/modules/gift-cards/transactions/transactions.js b/ashes/src/modules/gift-cards/transactions/transactions.js index 31b5b2f50f..fcf2192dd3 100644 --- a/ashes/src/modules/gift-cards/transactions/transactions.js +++ b/ashes/src/modules/gift-cards/transactions/transactions.js @@ -1,4 +1,3 @@ - import makeLiveSearch from 'modules/live-search'; const searchTerms = [ @@ -59,7 +58,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'giftCards.transactions.list', searchTerms, - 'gift_card_transactions_view/_search', + 'gift_card_transactions_view', 'giftCardTransactionsScope', { skipInitialFetch: true } diff --git a/ashes/src/modules/inventory/list.js b/ashes/src/modules/inventory/list.js index b349c27ed0..9fdb21b8f5 100644 --- a/ashes/src/modules/inventory/list.js +++ b/ashes/src/modules/inventory/list.js @@ -55,7 +55,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'inventory.list', searchTerms, - 'inventory_search_view/_search', + 'inventory_search_view', 'inventoryScope', { initialState: { sortBy: '-createdAt' }, diff --git a/ashes/src/modules/inventory/transactions/transactions.js b/ashes/src/modules/inventory/transactions/transactions.js index a73cab3bd9..1692079fb4 100644 --- a/ashes/src/modules/inventory/transactions/transactions.js +++ b/ashes/src/modules/inventory/transactions/transactions.js @@ -51,7 +51,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'inventory.transactions.list', searchTerms, - 'inventory_transactions_search_view/_search', + 'inventory_transactions_search_view', 'inventoryScope', { initialState: { sortBy: '-createdAt' }, diff --git a/ashes/src/modules/live-search/searches-data.js b/ashes/src/modules/live-search/searches-data.js index c2c4c1af59..861a039405 100644 --- a/ashes/src/modules/live-search/searches-data.js +++ b/ashes/src/modules/live-search/searches-data.js @@ -1,6 +1,6 @@ import _ from 'lodash'; import { dissoc } from 
'sprout-data'; -import { post } from '../../lib/search'; +import Agni from '../../lib/agni'; import { createReducer } from 'redux-act'; import { createNsAction } from './../utils'; import { toQuery, addNativeFilters } from '../../elastic/common'; @@ -59,7 +59,8 @@ export default function makeDataInSearches(namespace, esUrl, options = {}) { dispatch(saveRawQuery(jsonQuery)); - const promise = post(addPaginationParams(esUrl, searchState), processQuery(jsonQuery, { searchState, getState })) + const promise = + Agni.search(addPaginationParams(esUrl, searchState), processQuery(jsonQuery, { searchState, getState })) .then(response => { if (skipProcessingFetch(getState, fetchingSearchIdx)) { promise.abort(); diff --git a/ashes/src/modules/notes.js b/ashes/src/modules/notes.js index 6bf27de141..e00d55cc08 100644 --- a/ashes/src/modules/notes.js +++ b/ashes/src/modules/notes.js @@ -13,7 +13,7 @@ function geCurrentEntity(state) { const {reducer, actions} = makeLiveSearch( 'notes.list', [], - 'notes_search_view/_search', + 'notes_search_view', null, { processQuery: (query, {getState}) => { const currentEntity = geCurrentEntity(getState()); diff --git a/ashes/src/modules/orders/list.js b/ashes/src/modules/orders/list.js index 1e2f91d2a5..74b857c268 100644 --- a/ashes/src/modules/orders/list.js +++ b/ashes/src/modules/orders/list.js @@ -4,7 +4,7 @@ import searchTerms from './search-terms'; const { reducer, actions } = makeLiveSearch( 'orders.list', searchTerms, - 'orders_search_view/_search', + 'orders_search_view', 'ordersScope', { initialState: { sortBy: '-placedAt' }, diff --git a/ashes/src/modules/orders/new-order.js b/ashes/src/modules/orders/new-order.js index 2e283cdb38..bd5186d3f8 100644 --- a/ashes/src/modules/orders/new-order.js +++ b/ashes/src/modules/orders/new-order.js @@ -9,7 +9,7 @@ const emptyFilters = []; const emptyPhrase = ''; const quickSearch = makeQuickSearch( 'orders.newOrder.customers', - 'customers_search_view/_search', + 'customers_search_view', emptyFilters, emptyPhrase ); diff --git a/ashes/src/modules/products/list.js b/ashes/src/modules/products/list.js index db4138801a..3dc5eef8e0 100644 --- a/ashes/src/modules/products/list.js +++ b/ashes/src/modules/products/list.js @@ -9,7 +9,7 @@ import searchTerms from './search-terms'; const { reducer, actions } = makeLiveSearch( 'products.list', searchTerms, - 'products_search_view/_search', + 'products_search_view', 'productsScope', { rawSorts: ['title'], diff --git a/ashes/src/modules/promotions/list.js b/ashes/src/modules/promotions/list.js index 35c11a4701..b53850ef21 100644 --- a/ashes/src/modules/promotions/list.js +++ b/ashes/src/modules/promotions/list.js @@ -44,7 +44,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'promotions.list', searchTerms, - 'promotions_search_view/_search', + 'promotions_search_view', 'promotionsScope', { initialState: { sortBy: '-createdAt' } diff --git a/ashes/src/modules/quick-search.js b/ashes/src/modules/quick-search.js index 7d2acd7ea0..4d03b2a714 100644 --- a/ashes/src/modules/quick-search.js +++ b/ashes/src/modules/quick-search.js @@ -1,6 +1,6 @@ import _ from 'lodash'; import { createReducer } from 'redux-act'; -import { post } from '../lib/search'; +import Agni from 'lib/agni'; import { update } from 'sprout-data'; import { toQuery } from '../elastic/common'; import SearchTerm from '../paragons/search-term'; @@ -38,7 +38,7 @@ export default function makeQuickSearch(namespace, searchUrl, searchFilters, phr function fetcher(phrase, queryFilters = filters, options = 
{}) { options.phrase = phrase; const esQuery = toQuery(queryFilters, options); - return post(addPaginationParams(url, this.searchState), esQuery); + return Agni.search(addPaginationParams(url, this.searchState), esQuery); } const {reducer, ...actions} = makePagination(namespace, fetcher, state => _.get(state, `${namespace}.results`)); diff --git a/ashes/src/modules/skus/list.js b/ashes/src/modules/skus/list.js index 59921b1cd8..f345d79ab2 100644 --- a/ashes/src/modules/skus/list.js +++ b/ashes/src/modules/skus/list.js @@ -39,7 +39,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'skus.list', searchTerms, - 'sku_search_view/_search', + 'sku_search_view', 'skusScope', { rawSorts: ['title'], diff --git a/ashes/src/modules/skus/suggest.js b/ashes/src/modules/skus/suggest.js index 0e85bb58e3..ae12b0e9fe 100644 --- a/ashes/src/modules/skus/suggest.js +++ b/ashes/src/modules/skus/suggest.js @@ -3,7 +3,7 @@ import _ from 'lodash'; import { createAction, createReducer } from 'redux-act'; -import { post } from 'lib/search'; +import Agni from 'lib/agni'; import * as dsl from 'elastic/dsl'; import { createAsyncActions } from '@foxcomm/wings'; @@ -30,7 +30,7 @@ const _suggestSkus = createAsyncActions( })]; } - return post('sku_search_view/_search', dsl.query({ + return Agni.search('sku_search_view', dsl.query({ bool: { filter: filters, should: [ diff --git a/ashes/src/modules/taxonomies/list.js b/ashes/src/modules/taxonomies/list.js index 1732ddccc5..8deedf81c3 100644 --- a/ashes/src/modules/taxonomies/list.js +++ b/ashes/src/modules/taxonomies/list.js @@ -6,7 +6,7 @@ import searchTerms from './search-terms'; const { reducer, actions } = makeLiveSearch( 'taxonomies.list', searchTerms, - 'taxonomies_search_view/_search', + 'taxonomies_search_view', 'taxonomiesScope', { initialState: { sortBy: 'name' }, diff --git a/ashes/src/modules/taxons/details/products-list.js b/ashes/src/modules/taxons/details/products-list.js index f947d7628f..c3cf0a2abb 100644 --- a/ashes/src/modules/taxons/details/products-list.js +++ b/ashes/src/modules/taxons/details/products-list.js @@ -6,7 +6,7 @@ import productsSearchTerms from 'modules/products/search-terms'; const { reducer, actions } = makeLiveSearch( 'taxons.details.products', productsSearchTerms, - 'products_search_view/_search', + 'products_search_view', 'productsScope', { initialState: { sortBy: 'name' }, diff --git a/ashes/src/modules/taxons/list.js b/ashes/src/modules/taxons/list.js index 8e9375ed4c..bacf8c3fb6 100644 --- a/ashes/src/modules/taxons/list.js +++ b/ashes/src/modules/taxons/list.js @@ -4,7 +4,7 @@ import makeLiveSearch from 'modules/live-search'; const searchTerms = []; const storeLocation = 'taxons.list'; -const searchView = 'taxons_search_view/_search'; +const searchView = 'taxons_search_view'; const scope = 'taxonsScope'; const { reducer, actions } = makeLiveSearch( diff --git a/ashes/src/modules/users/list.js b/ashes/src/modules/users/list.js index 0f175e985f..6d33243652 100644 --- a/ashes/src/modules/users/list.js +++ b/ashes/src/modules/users/list.js @@ -26,7 +26,7 @@ const searchTerms = [ const { reducer, actions } = makeLiveSearch( 'users.list', searchTerms, - 'store_admins_search_view/_search', + 'store_admins_search_view', 'storeAdminsScope', { initialState: { sortBy: '-createdAt' }, diff --git a/builder.py b/builder.py index b9a9ae4427..8bd335f822 100755 --- a/builder.py +++ b/builder.py @@ -16,11 +16,11 @@ ROOT_DIR=os.path.abspath(os.path.dirname(os.path.basename(__file__))) - # Structure for now # project directory 
from root is key # dependent project directories is a value PROJECTS = { + 'agni': [], 'ashes': [], 'data-import': [], 'demo/peacock': [], diff --git a/search-service/api/app/foxcomm/search/api/Api.scala b/search-service/api/app/foxcomm/search/api/Api.scala deleted file mode 100644 index c9d57f575a..0000000000 --- a/search-service/api/app/foxcomm/search/api/Api.scala +++ /dev/null @@ -1,42 +0,0 @@ -package foxcomm.search.api - -import com.sksamuel.elastic4s.ElasticDsl._ -import com.twitter.finagle.Http -import com.twitter.finagle.http.Status -import com.twitter.util.Await -import foxcomm.search._ -import foxcomm.utils.finch._ -import io.circe.generic.auto._ -import io.finch._ -import io.finch.circe._ -import org.elasticsearch.common.ValidationException -import scala.concurrent.ExecutionContext - -object Api extends App { - def endpoint(searchService: SearchService)(implicit ec: ExecutionContext) = - post( - "search" :: string :: string :: param("size") - .as[Int] :: paramOption("from").as[Int] :: jsonBody[SearchQuery]) { - (searchIndex: String, searchType: String, size: Int, from: Option[Int], searchQuery: SearchQuery) ⇒ - searchService - .searchFor(searchIndex / searchType, searchQuery, searchSize = size, searchFrom = from) - .toTwitterFuture - .map(Ok) - } - - def errorHandler[A]: PartialFunction[Throwable, Output[A]] = { - case ex: ValidationException ⇒ Output.failure(ex, Status.BadRequest) - case ex: Exception ⇒ Output.failure(ex, Status.InternalServerError) - case ex ⇒ Output.failure(new RuntimeException(ex), Status.InternalServerError) - } - - implicit val ec: ExecutionContext = ExecutionContext.global - val config = AppConfig.load() - val svc = SearchService.fromConfig(config) - - Await.result( - Http.server - .withStreaming(enabled = true) - .serve(s"${config.http.interface}:${config.http.port}", - endpoint(svc).handle(errorHandler).toServiceAs[Application.Json])) -} diff --git a/search-service/core/app/foxcomm/search/AppConfig.scala b/search-service/core/app/foxcomm/search/AppConfig.scala deleted file mode 100644 index 1dae9a7138..0000000000 --- a/search-service/core/app/foxcomm/search/AppConfig.scala +++ /dev/null @@ -1,18 +0,0 @@ -package foxcomm.search - -import com.typesafe.config.ConfigFactory -import pureconfig._ - -final case class AppConfig(http: AppConfig.Http, elasticsearch: AppConfig.ElasticSearch) - -object AppConfig { - final case class Http(interface: String, port: Int) - - final case class ElasticSearch(host: String, cluster: String) - - def load(): AppConfig = { - val config = - ConfigFactory.systemProperties.withFallback(ConfigFactory.load()) - loadConfigOrThrow[AppConfig](config, "app") - } -} diff --git a/search-service/core/app/foxcomm/search/SearchService.scala b/search-service/core/app/foxcomm/search/SearchService.scala deleted file mode 100644 index 4a1bbbd11f..0000000000 --- a/search-service/core/app/foxcomm/search/SearchService.scala +++ /dev/null @@ -1,50 +0,0 @@ -package foxcomm.search - -import scala.language.postfixOps -import cats.implicits._ -import com.sksamuel.elastic4s.ElasticDsl._ -import com.sksamuel.elastic4s._ -import io.circe._ -import io.circe.jawn.parseByteBuffer -import org.elasticsearch.common.settings.Settings -import scala.concurrent.{ExecutionContext, Future} - -class SearchService(private val client: ElasticClient) extends AnyVal { - import SearchService.ExtractJsonObject - - def searchFor(searchIndex: IndexAndTypes, - searchQuery: SearchQuery, - searchSize: Int, - searchFrom: Option[Int])(implicit ec: ExecutionContext): 
Future[SearchResult] = { - val baseQuery = search in searchIndex size searchSize rawQuery Json - .fromJsonObject(searchQuery.query) - .noSpaces - val query = searchQuery.fields.fold(baseQuery)(fields ⇒ baseQuery sourceInclude (fields.toList: _*)) - client - .execute(searchFrom.fold(query)(query from)) - .map(response ⇒ - SearchResult(result = response.hits.collect { - case ExtractJsonObject(obj) ⇒ obj - }(collection.breakOut), pagination = SearchPagination(total = response.totalHits))) - } -} - -object SearchService { - object ExtractJsonObject { - def unapply(hit: RichSearchHit): Option[JsonObject] = - parseByteBuffer(hit.sourceRef.toChannelBuffer.toByteBuffer).toOption - .flatMap(_.asObject) - } - - def apply(client: ElasticClient): SearchService = new SearchService(client) - - def fromConfig(config: AppConfig): SearchService = { - val esConfig = config.elasticsearch - val settings = - Settings.settingsBuilder().put("cluster.name", esConfig.cluster).build() - val client = - ElasticClient.transport(settings, ElasticsearchClientUri(esConfig.host)) - - new SearchService(client) - } -} diff --git a/search-service/core/app/foxcomm/search/payload.scala b/search-service/core/app/foxcomm/search/payload.scala deleted file mode 100644 index a6952a7166..0000000000 --- a/search-service/core/app/foxcomm/search/payload.scala +++ /dev/null @@ -1,6 +0,0 @@ -package foxcomm.search - -import cats.data.NonEmptyList -import io.circe.JsonObject - -final case class SearchQuery(query: JsonObject, fields: Option[NonEmptyList[String]]) diff --git a/search-service/project/Dependencies.scala b/search-service/project/Dependencies.scala deleted file mode 100644 index 9714d4f4a7..0000000000 --- a/search-service/project/Dependencies.scala +++ /dev/null @@ -1,35 +0,0 @@ -import sbt._ - -object Dependencies { - object versions { - val cats = "0.9.0" - val circe = "0.8.0" - val elastic4s = "2.1.2" - val finch = "0.14.0" - } - - val core = Seq( - "com.github.pureconfig" %% "pureconfig" % "0.7.2", - "com.typesafe" % "config" % "1.3.1", - "org.typelevel" %% "cats-core" % versions.cats - ) - - val es = Seq( - "com.fasterxml.jackson.dataformat" % "jackson-dataformat-smile" % "2.8.2", - "com.sksamuel.elastic4s" %% "elastic4s-core" % versions.elastic4s - ) - - val circe = Seq( - "io.circe" %% "circe-core" % versions.circe, - "io.circe" %% "circe-generic" % versions.circe, - "io.circe" %% "circe-parser" % versions.circe - ) - - val finch = Seq( - "com.github.finagle" %% "finch-circe" % versions.finch, - "com.github.finagle" %% "finch-core" % versions.finch, - "com.github.finagle" %% "finch-generic" % versions.finch - ) - - val jwt = "com.pauldijou" %% "jwt-core" % "0.12.1" -} diff --git a/tabernacle/ansible/goldrush_appliance.yml b/tabernacle/ansible/goldrush_appliance.yml index cfddb8715f..cdf75e4390 100644 --- a/tabernacle/ansible/goldrush_appliance.yml +++ b/tabernacle/ansible/goldrush_appliance.yml @@ -30,6 +30,7 @@ api_server: "appliance-{{ansible_default_ipv4.address | replace(\".\", \"-\") }}.foxcommerce.com" roles: - { role: dev/dashboard, when: first_run } + - { role: hotfix/rsyslog, when: first_run } - { role: hotfix/drop_mat_views, when: first_run } - { role: dev/flyway } - { role: dev/seed_system, when: first_run } diff --git a/tabernacle/ansible/group_vars/all b/tabernacle/ansible/group_vars/all index cefc5fb78d..950db704ea 100644 --- a/tabernacle/ansible/group_vars/all +++ b/tabernacle/ansible/group_vars/all @@ -51,6 +51,7 @@ gce_zone: "us-central1-a" # Default docker tags for containers docker_tags: ashes: "{{ 
lookup('env', 'DOCKER_TAG_ASHES') | default('master', true) }}" + agni: "{{ lookup('env', 'DOCKER_TAG_AGNI') | default('master', true) }}" firebrand: "{{ lookup('env', 'DOCKER_TAG_FIREBRAND') | default('master', true) }}" peacock: "{{ lookup('env', 'DOCKER_TAG_PEACOCK') | default('master', true) }}" phoenix: "{{ lookup('env', 'DOCKER_TAG_PHOENIX') | default('master', true) }}" @@ -89,6 +90,7 @@ docker_tags: # Configurable Marathon re-deploys marathon_restart: ashes: "{{ lookup('env', 'MARATHON_ASHES') | default(true, true) | bool }}" + agni: "{{ lookup('env', 'MARATHON_AGNI') | default(true, true) | bool }}" firebrand: "{{ lookup('env', 'MARATHON_FIREBRAND') | default(true, true) | bool }}" peacock: "{{ lookup('env', 'MARATHON_PEACOCK') | default(true, true) | bool }}" phoenix: "{{ lookup('env', 'MARATHON_PHOENIX') | default(true, true) | bool }}" @@ -141,6 +143,10 @@ phoenix_host: "phoenix.{{consul_suffix}}" phoenix_port: 9090 phoenix_server: "{{phoenix_host}}:{{phoenix_port}}" +agni_host: "agni.{{consul_suffix}}" +agni_port: 9000 +agni_server: "{{agni_host}}:{{agni_port}}" + isaac_host: "isaac.{{consul_suffix}}" isaac_port: 9190 isaac_server: "{{isaac_host}}:{{isaac_port}}" @@ -266,6 +272,10 @@ phoenix_api_password: "api$pass7!" phoenix_tax_rule_region: 4129 phoenix_tax_rule_rate: 7.5 +# Agni +agni_src: ../../agni +agni_dir: /agni + # Green River greenriver_env: localhost greenriver_restart: "false" diff --git a/tabernacle/ansible/roles/app/config_gen/templates/goldrush.cfg.j2 b/tabernacle/ansible/roles/app/config_gen/templates/goldrush.cfg.j2 index 3ab8aab6d7..399552cde8 100644 --- a/tabernacle/ansible/roles/app/config_gen/templates/goldrush.cfg.j2 +++ b/tabernacle/ansible/roles/app/config_gen/templates/goldrush.cfg.j2 @@ -16,6 +16,7 @@ export WITH_APPLIANCE_SEEDING={{with_seeding_value | bool | lower}} # Core export DOCKER_TAG_ASHES:=master +export DOCKER_TAG_AGNI:=master export DOCKER_TAG_PEACOCK:=master export DOCKER_TAG_PHOENIX:=master export DOCKER_TAG_GREENRIVER:=master @@ -66,6 +67,7 @@ export DOCKER_TAG_DATA_IMPORT=master #################################################################### # Core +export MARATHON_AGNI:=false export MARATHON_ASHES:=false export MARATHON_PEACOCK:=false export MARATHON_PHOENIX:=false diff --git a/tabernacle/ansible/roles/app/deploy_helper/vars/main.yml b/tabernacle/ansible/roles/app/deploy_helper/vars/main.yml index db099ad5e8..57cfa3e426 100644 --- a/tabernacle/ansible/roles/app/deploy_helper/vars/main.yml +++ b/tabernacle/ansible/roles/app/deploy_helper/vars/main.yml @@ -7,6 +7,7 @@ input_aliases: # Supported canonical apps (otherwise, playbook fails fast) supported_apps: - ashes + - agni - peacock - phoenix - greenriver @@ -75,6 +76,7 @@ app_push_targets: # Override default Docker tags docker_tags: + agni: "{{tag_name}}" ashes: "{{tag_name}}" peacock: "{{tag_name}}" phoenix: "{{tag_name}}" diff --git a/tabernacle/ansible/roles/dev/balancer/templates/service_locations.j2 b/tabernacle/ansible/roles/dev/balancer/templates/service_locations.j2 index c97a617d67..20179048a5 100644 --- a/tabernacle/ansible/roles/dev/balancer/templates/service_locations.j2 +++ b/tabernacle/ansible/roles/dev/balancer/templates/service_locations.j2 @@ -144,6 +144,16 @@ location ~ /api/search/public/.*/\d+$ { break; } +# Proxy to agni +location /api/advanced-search/ { + proxy_pass http://agni/api/search/; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + 
break; +} + # Proxy to middlewarehouse location /api/v1/inventory/ { auth_request /internal-auth; diff --git a/tabernacle/ansible/roles/dev/balancer/templates/services.j2 b/tabernacle/ansible/roles/dev/balancer/templates/services.j2 index 17e3b5450a..d9a49a4613 100644 --- a/tabernacle/ansible/roles/dev/balancer/templates/services.j2 +++ b/tabernacle/ansible/roles/dev/balancer/templates/services.j2 @@ -58,6 +58,11 @@ upstream sol { << else >> server {{solomon_server}} fail_timeout=30s max_fails=10; << end >> } +upstream agni { + << range service "agni" >> server << .Address >>:<< .Port >> max_fails=10 fail_timeout=30s weight=1; + << else >> server {{agni_server}} fail_timeout=30s max_fails=10; << end >> +} + upstream hyperion { << range service "hyperion" >> server << .Address >>:<< .Port >> max_fails=10 fail_timeout=30s weight=1; << else >> server {{hyperion_server}} fail_timeout=30s max_fails=10; << end >> diff --git a/tabernacle/ansible/roles/dev/marathon_groups/templates/core-backend/agni.json.j2 b/tabernacle/ansible/roles/dev/marathon_groups/templates/core-backend/agni.json.j2 new file mode 100644 index 0000000000..0537da75b5 --- /dev/null +++ b/tabernacle/ansible/roles/dev/marathon_groups/templates/core-backend/agni.json.j2 @@ -0,0 +1,49 @@ +{ + "id": "agni", + "cmd": null, + "cpus": 1, + "mem": 640, + "disk": 0, + "instances": 1, + "constraints": [], + "labels": { + "MARATHON_SINGLE_INSTANCE_APP": "true", + "LANG": "scala", + "consul": "agni", + "overrideTaskName": "agni", + "TAG": "{{docker_tags.agni}}" + }, + "upgradeStrategy": { + "minimumHealthCapacity": 0, + "maximumOverCapacity": 0 + }, + "env": { + "SEARCH_SERVER": "elasticsearch://{{search_server}}", + "JAVA_OPTS":"-XX:+UseConcMarkSweepGC -Xms512m -Xmx512m" + }, + "healthChecks": [{ + "path": "/ping", + "protocol": "HTTP", + "gracePeriodSeconds": 300, + "intervalSeconds": 30, + "timeoutSeconds": 20, + "maxConsecutiveFailures": 3, + "ignoreHttp1xx": false + }], + "container": { + "type": "DOCKER", + "volumes": [{ + "containerPath": "{{docker_logs_dir}}", + "hostPath": "{{docker_logs_host_dir}}", + "mode": "RW" + } + ], + "docker": { + "image": "{{docker_registry}}:5000/agni:{{docker_tags.agni}}", + "network": "HOST", + "privileged": false, + "parameters": [], + "forcePullImage": true + } + } +} diff --git a/tabernacle/ansible/roles/dev/marathon_groups/templates/highlander.json.j2 b/tabernacle/ansible/roles/dev/marathon_groups/templates/highlander.json.j2 index 644cad40bf..cf28a5811d 100644 --- a/tabernacle/ansible/roles/dev/marathon_groups/templates/highlander.json.j2 +++ b/tabernacle/ansible/roles/dev/marathon_groups/templates/highlander.json.j2 @@ -8,7 +8,8 @@ {% include "core-backend/phoenix.json.j2" %}, {% include "core-backend/isaac.json.j2" %}, {% include "core-backend/solomon.json.j2" %}, - {% include "core-backend/middlewarehouse.json.j2" %} + {% include "core-backend/middlewarehouse.json.j2" %}, + {% include "core-backend/agni.json.j2" %} ] }, { diff --git a/tabernacle/ansible/roles/dev/marathon_restart/tasks/main.yml b/tabernacle/ansible/roles/dev/marathon_restart/tasks/main.yml index 9801296f4e..54d56196e8 100644 --- a/tabernacle/ansible/roles/dev/marathon_restart/tasks/main.yml +++ b/tabernacle/ansible/roles/dev/marathon_restart/tasks/main.yml @@ -12,6 +12,7 @@ - { group: core-backend, app: middlewarehouse, id: middlewarehouse } - { group: core-backend, app: isaac, id: isaac } - { group: core-backend, app: solomon, id: solomon } + - { group: core-backend, app: agni, id: agni } - { group: core-frontend, app: ashes, 
id: ashes } - { group: core-frontend, app: peacock, id: peacock } - { group: core-frontend, app: perfect-gourmet, id: storefront_tpg } diff --git a/tabernacle/ansible/roles/dev/marathon_restart/vars/main.yml b/tabernacle/ansible/roles/dev/marathon_restart/vars/main.yml index f7b92bd65d..de49d82da9 100644 --- a/tabernacle/ansible/roles/dev/marathon_restart/vars/main.yml +++ b/tabernacle/ansible/roles/dev/marathon_restart/vars/main.yml @@ -3,6 +3,7 @@ marathon_deploy: isaac: "{{ lookup('env', 'MRT_ISAAC') | default(false, true) | bool }}" solomon: "{{ lookup('env', 'MRT_SOLOMON') | default(false, true) | bool }}" middlewarehouse: "{{ lookup('env', 'MRT_MIDDLEWAREHOUSE') | default(false, true) | bool }}" + agni: "{{ lookup('env', 'MRT_AGNI') | default(false, true) | bool }}" greenriver: "{{ lookup('env', 'MRT_GREENRIVER') | default(false, true) | bool }}" capture_consumer: "{{ lookup('env', 'MRT_CAPTURE_CONSUMER') | default(false, true) | bool }}" diff --git a/tabernacle/ansible/roles/dev/nginx/templates/service_locations.j2 b/tabernacle/ansible/roles/dev/nginx/templates/service_locations.j2 index 0cd1329aca..f245db87da 100644 --- a/tabernacle/ansible/roles/dev/nginx/templates/service_locations.j2 +++ b/tabernacle/ansible/roles/dev/nginx/templates/service_locations.j2 @@ -144,6 +144,16 @@ location ~ /api/search/public/.*/\d+$ { break; } +# Proxy to agni +location /api/advanced-search/ { + proxy_pass http://agni/api/search/; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + break; +} + # Proxy to middlewarehouse location /api/v1/inventory/ { auth_request /internal-auth; diff --git a/tabernacle/ansible/roles/dev/nginx/templates/services.j2 b/tabernacle/ansible/roles/dev/nginx/templates/services.j2 index 17e3b5450a..38e21debb4 100644 --- a/tabernacle/ansible/roles/dev/nginx/templates/services.j2 +++ b/tabernacle/ansible/roles/dev/nginx/templates/services.j2 @@ -68,6 +68,11 @@ upstream geronimo { << else >> server {{geronimo_server}} fail_timeout=30s max_fails=10; << end >> } +upstream agni { + << range service "agni" >> server << .Address >>:<< .Port >> max_fails=10 fail_timeout=30s weight=1; + << else >> server {{agni_server}} fail_timeout=30s max_fails=10; << end >> +} + upstream ashes { << range service "ashes" >> server << .Address >>:<< .Port >> max_fails=10 fail_timeout=30s weight=1; << else >> server {{ashes_server}} fail_timeout=30s max_fails=10; << end >> diff --git a/tabernacle/ansible/roles/hotfix/rsyslog/tasks/main.yml b/tabernacle/ansible/roles/hotfix/rsyslog/tasks/main.yml new file mode 100644 index 0000000000..c6384bbcc2 --- /dev/null +++ b/tabernacle/ansible/roles/hotfix/rsyslog/tasks/main.yml @@ -0,0 +1,8 @@ +- name: Remove Current File Data Forwarding Config + file: path=/etc/rsyslog.d/51-mesos.conf state=absent + +- name: Install New File Data Forwarding Config + template: src=mesos.conf.j2 dest=/etc/rsyslog.d/51-mesos.conf owner=root group=root mode=0644 + +- name: Restart rsyslog + service: name=rsyslog state=restarted diff --git a/tabernacle/ansible/roles/hotfix/rsyslog/templates/mesos.conf.j2 b/tabernacle/ansible/roles/hotfix/rsyslog/templates/mesos.conf.j2 new file mode 100644 index 0000000000..7295eb724d --- /dev/null +++ b/tabernacle/ansible/roles/hotfix/rsyslog/templates/mesos.conf.j2 @@ -0,0 +1,77 @@ +module(load="imfile" PollingInterval="10") + +# application logs in docker containers +input(type="imfile" + File="/var/log/docker/phoenix.log" + 
Tag="phoenix" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/middlewarehouse.log" + Tag="middlewarehouse" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/ashes.log" + Tag="ashes" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/td-storefront.log" + Tag="storefront" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/tpg-storefront.log" + Tag="storefront" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/firebrand.log" + Tag="storefront" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/isaac.log" + Tag="isaac" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/solomon.log" + Tag="solomon" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/green-river.log" + Tag="green-river" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/messaging.log" + Tag="messaging" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/capture-consumer.log" + Tag="capture-consumer" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/gift-card-consumer.log" + Tag="gift-card-consumer" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/shipment-consumer.log" + Tag="shipment-consumer" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/stock-items-consumer.log" + Tag="stock-items-consumer" + Facility="local6") + +input(type="imfile" + File="/var/log/docker/agni.log" + Tag="agni" + Facility="local6") diff --git a/tabernacle/ansible/roles/ops/buildkite_pipeline/templates/pipeline.json.j2 b/tabernacle/ansible/roles/ops/buildkite_pipeline/templates/pipeline.json.j2 index f282abe264..ba8ade1349 100644 --- a/tabernacle/ansible/roles/ops/buildkite_pipeline/templates/pipeline.json.j2 +++ b/tabernacle/ansible/roles/ops/buildkite_pipeline/templates/pipeline.json.j2 @@ -24,6 +24,7 @@ "APPLIANCE_DNS_RECORD": "feature-branch-{{docker_tag_name}}", "DOCKER_TAG_PHOENIX": "{{docker_tag_name}}", "MARATHON_PHOENIX": "true", + "MARATHON_AGNI": "false", "MARATHON_ASHES": "false", "MARATHON_PEACOCK": "false", "MARATHON_GREENRIVER": "false",