Fixed very complicated build issues, #110
Time estimation: 4h
kudkudak committed Apr 19, 2014
1 parent 0dc6448 commit 80242b9
Show file tree
Hide file tree
Showing 14 changed files with 378 additions and 133 deletions.
8 changes: 8 additions & 0 deletions mantis_shrimp/README.md
@@ -11,3 +11,11 @@ Run

And open the project in IntelliJ (remember to check that the project structure is correct -
ctrl+alt+shift+s)


**Note**

You will need the .jar of scala-kafka. I will push a fork of scala-kafka that
compiles locally. For now you can try building it yourself.
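
A minimal sketch of one way to wire the locally built jar in (the jar name
below is hypothetical): sbt treats everything under the project's lib/
directory as an unmanaged dependency, so the simplest option is to drop the
built scala-kafka jar there, e.g. mantis_shrimp/lib/scala-kafka_2.10.jar.
The default location can also be made explicit in build.sbt:

unmanagedBase := baseDirectory.value / "lib"  // default unmanaged-jar directory, shown only for clarity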


33 changes: 28 additions & 5 deletions mantis_shrimp/build.sbt
@@ -1,6 +1,4 @@
import sbt._
import sbt.Keys._

lazy val stampleRootProject = Project(id = "mantis_shrimp",base = file("."))

name := "mantis_shrimp"

@@ -10,11 +8,35 @@ scalaVersion := "2.10.2"

resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"

libraryDependencies += "org.scala-sbt" % "sbt" % "0.13.1"
libraryDependencies += "com.typesafe.akka" % "akka-actor_2.10" % "2.2-M1" classifier "javadoc"

libraryDependencies += "com.typesafe.akka" % "akka-actor_2.10" % "2.2-M1"

libraryDependencies += "com.typesafe.akka" % "akka-actor_2.10" % "2.2-M1" classifier "javadoc"
libraryDependencies += "org.scala-sbt" % "sbt" % "0.13.1"

libraryDependencies += "org.apache.kafka" % "kafka_2.10" % "0.8.1"

libraryDependencies += "com.101tec" % "zkclient" % "0.3"

libraryDependencies += "log4j" % "log4j" % "1.2.17"

libraryDependencies += "net.sf.jopt-simple" % "jopt-simple" % "4.5"

libraryDependencies += "org.apache.avro" % "avro" % "1.7.5"

libraryDependencies += "com.github.scopt" % "scopt_2.9.2" % "3.1.0"

libraryDependencies += "com.yammer.metrics" % "metrics-core" % "2.2.0"

libraryDependencies += "nl.grons" % "metrics-scala_2.9.2" % "3.0.3"

libraryDependencies += "org.apache.thrift" % "libthrift" % "0.9.1"

libraryDependencies += "joda-time" % "joda-time" % "2.3"

libraryDependencies += "org.joda" % "joda-convert" % "1.5"

libraryDependencies += "org.specs2" % "specs2_2.10" % "2.2.2"

libraryDependencies ++= Seq("com.propensive" %% "rapture-core" % "0.9.0")

@@ -32,3 +54,4 @@ libraryDependencies ++= Seq("com.propensive" %% "rapture-fs" % "0.9.0")

libraryDependencies ++= Seq("com.propensive" %% "rapture-fs" % "0.9.0" classifier "javadoc")


32 changes: 0 additions & 32 deletions mantis_shrimp/ner/FourClassNERTagger.scala

This file was deleted.

12 changes: 0 additions & 12 deletions mantis_shrimp/ner/NERTag.scala

This file was deleted.

10 changes: 0 additions & 10 deletions mantis_shrimp/ner/NERTagger.scala

This file was deleted.

37 changes: 0 additions & 37 deletions mantis_shrimp/ner/SevenClassNERTagger.scala

This file was deleted.

31 changes: 0 additions & 31 deletions mantis_shrimp/ner/ThreeClassNERTagger.scala

This file was deleted.

5 changes: 5 additions & 0 deletions mantis_shrimp/project/plugins.sbt
@@ -0,0 +1,5 @@
resolvers += "Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/"

addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.7.0-SNAPSHOT")

logLevel := Level.Warn
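
With the sbt-idea plugin on the classpath, the IntelliJ project files mentioned in the README can be (re)generated from sbt; gen-idea is the plugin's standard task, not something added by this commit:

sbt gen-idea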
4 changes: 1 addition & 3 deletions mantis_shrimp/src/main/scala/HelloActor.scala
@@ -1,8 +1,6 @@
package mantis_shrimp

import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.Props
import akka.actor.{Actor, Props, ActorSystem}

class HelloActor extends Actor {
def receive = {
44 changes: 41 additions & 3 deletions mantis_shrimp/src/main/scala/Sandbox.scala
@@ -5,6 +5,18 @@
// See https://github.com/propensive/rapture-json-test/blob/master/src/json.scala - for scala test of json

package mantis_shrimp

import ly.stealth.testing

import org.specs2.mutable._
import java.util.UUID
import kafka.consumer._
import kafka.producer._
import kafka.utils._
import kafka.akka._
import akka.actor.{Actor, Props, ActorSystem}
import akka.routing.RoundRobinRouter

import rapture.core._
import rapture.json._
import jsonParsers.scalaJson._
@@ -13,8 +25,34 @@ import scala.util.parsing.json._
// Internal strategy for Rapture.io - I don't see this design choice..
import strategy.throwExceptions

object Sandbox extends App {
def runTest(implicit parser: JsonBufferParser[String]){
object Sandbox extends App with Logging {
def kafkaScalaIntegrationTest(implicit parser: JsonBufferParser[String]){

info("Fetching kafka configs")
info(DonCorleoneUtils.get_configuration[Int]("kafka","port").toString())
info(DonCorleoneUtils.get_configuration[String]("kafka","host").toString())

val kafka_port = DonCorleoneUtils.get_configuration[Int]("kafka","port")
val kafka_host = DonCorleoneUtils.get_configuration[String]("kafka","host")

val testMessage = UUID.randomUUID().toString
val testTopic = UUID.randomUUID().toString
val groupId_1 = UUID.randomUUID().toString

var testStatus = false

info("starting sample broker testing")
val producer = new KafkaProducer(testTopic,f"$kafka_host%s:$kafka_port%d")
producer.send(testMessage)

val consumer = new KafkaConsumer(testTopic,groupId_1,f"$kafka_host%s:$kafka_port%d")

def exec(binaryObject: Array[Byte]) = {
val message = new String(binaryObject)
info("testMessage = " + testMessage + " and consumed message = " + message)
consumer.close()
testStatus = true
}

val json_example = json""" {"ala":13, "beka":[1,2,"ala2"]} """

@@ -23,5 +61,5 @@ object Sandbox extends App {
println(json_example.ala.as[Int]);
}

runTest
kafkaScalaIntegrationTest
}
52 changes: 52 additions & 0 deletions mantis_shrimp/src/main/scala/kafka/AkkaKafka.scala
@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package kafka.akka;

import scala.collection.JavaConverters._
import org.apache.thrift.protocol.TBinaryProtocol
import org.apache.thrift.transport.TIOStreamTransport
import org.apache.thrift.{TSerializer, TDeserializer}
import akka.actor.{Actor, Props, ActorSystem}
import akka.routing.RoundRobinRouter
import kafka.utils.Logging
import java.util.concurrent.atomic._
import kafka.producer._

class KafkaAkkaProducer extends Actor with Logging {
private val producerCreated = new AtomicBoolean(false)

var producer: KafkaProducer = null

def init(topic: String, zklist: String) = {
if (producerCreated.getAndSet(true)) {
throw new RuntimeException(this.getClass.getSimpleName + " this kafka akka actor has a producer already")
}

producer = new KafkaProducer(topic,zklist)
}

def receive = {
case t: (String, String) => {
// a (topic, broker list) tuple initializes the underlying producer
init(t._1, t._2)
}
case msg: String => {
// plain String messages are published to the configured topic
producer.send(msg)
}
}
}
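
A minimal usage sketch (not part of this commit) of how the actor above could
be driven; the actor system name, topic and broker address are hypothetical
placeholders:

import akka.actor.{ActorSystem, Props}
import kafka.akka.KafkaAkkaProducer

object ProducerSketch extends App {
  val system = ActorSystem("mantis-shrimp-sketch")
  val producer = system.actorOf(Props[KafkaAkkaProducer])

  // The first message, a (topic, broker list) tuple, initializes the underlying KafkaProducer.
  val init: (String, String) = ("test-topic", "localhost:9092")
  producer ! init

  // Subsequent String messages are published to that topic.
  producer ! "hello from mantis_shrimp"

  // Crude wait so the actor can drain its mailbox before the system goes down (sketch only).
  Thread.sleep(1000)
  system.shutdown()
}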
