Skip to content

Commit

Permalink
Bump version number to 0.10.0.
Browse files Browse the repository at this point in the history
  • Loading branch information
obermeier committed Apr 25, 2017
2 parents 3a69c81 + 0880cee commit 7135618
Show file tree
Hide file tree
Showing 67 changed files with 2,333 additions and 980 deletions.
59 changes: 40 additions & 19 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.scray</groupId>
<artifactId>scray</artifactId>
<version>0.9.5</version>
<artifactId>scray-parent</artifactId>
<version>0.10.0</version>
<inceptionYear>2014</inceptionYear>
<packaging>pom</packaging>
<name>Scray</name>
Expand All @@ -20,6 +20,11 @@
</licenses>

<developers>
<developer>
<name>Christian Zirpins</name>
<organization>Scray</organization>
<organizationUrl>https://github.com/scray</organizationUrl>
</developer>
<developer>
<name>Andreas Petter</name>
<organization>Scray</organization>
Expand All @@ -43,7 +48,7 @@
<url>https://github.com/scray</url>
<connection>scm:git:git://github.com/scray/scray.git</connection>
<developerConnection>scm:git:https://github.com/scray/scray.git</developerConnection>
<tag>org.scray-0.9.5</tag>
<tag>org.scray-0.10.0</tag>
</scm>

<properties>
Expand Down Expand Up @@ -103,7 +108,7 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.10</artifactId>
<artifactId>scalatest_${scala.suffix}</artifactId>
<version>2.2.2</version>
<scope>test</scope>
</dependency>
Expand All @@ -115,12 +120,12 @@
</dependency>
<dependency>
<groupId>com.typesafe.scala-logging</groupId>
<artifactId>scala-logging-api_2.10</artifactId>
<artifactId>scala-logging-api_${scala.suffix}</artifactId>
<version>2.1.2</version>
</dependency>
<dependency>
<groupId>com.typesafe.scala-logging</groupId>
<artifactId>scala-logging-slf4j_2.10</artifactId>
<artifactId>scala-logging-slf4j_${scala.suffix}</artifactId>
<version>2.1.2</version>
</dependency>
<dependency>
Expand All @@ -133,7 +138,7 @@
<dependencies>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>util-core_2.10</artifactId>
<artifactId>util-core_${scala.suffix}</artifactId>
<version>6.22.0</version>
</dependency>
<dependency>
Expand Down Expand Up @@ -161,22 +166,12 @@
<artifactId>guava</artifactId>
<version>16.0.1</version>
</dependency>

<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>2.1.7.1</version>
<classifier>shaded</classifier>
<exclusions>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
<version>3.1.0</version>
</dependency>
</dependencies>


</dependencyManagement>

<build>
Expand Down Expand Up @@ -272,7 +267,7 @@
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.0</version>
<version>3.2.1</version>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
Expand Down Expand Up @@ -392,6 +387,32 @@
</plugins>
</build>
</profile>
<profile>
<id>default</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>

</project>

41 changes: 22 additions & 19 deletions scray-cassandra/pom.xml
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.scray</groupId>
<artifactId>scray</artifactId>
<version>0.9.5</version>
<artifactId>scray-parent</artifactId>
<version>0.10.0</version>
<relativePath>../pom.xml</relativePath>
</parent>

Expand All @@ -24,25 +24,14 @@
</distributionManagement>

<dependencies>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<classifier>shaded</classifier>
<exclusions>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.scray</groupId>
<artifactId>scray-querying</artifactId>
<version>0.9.5</version>
<version>0.10.0</version>
</dependency>
<dependency>
<groupId>com.github.scopt</groupId>
<artifactId>scopt_2.10</artifactId>
<artifactId>scopt_${scala.suffix}</artifactId>
<version>3.2.0</version>
<scope>test</scope>
</dependency>
Expand All @@ -53,12 +42,12 @@
</dependency>
<dependency>
<groupId>com.websudos</groupId>
<artifactId>phantom-dsl_2.10</artifactId>
<version>1.4.0</version>
<artifactId>phantom-dsl_${scala.suffix}</artifactId>
<version>1.6.0</version>
<exclusions>
<exclusion>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<groupId>com.datastax.cassandra</groupId>
</exclusion>
</exclusions>
</dependency>
Expand All @@ -68,6 +57,20 @@
<version>2.1.9.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>1.2.1</version>
</dependency>
</dependencies>

<build>
Expand All @@ -83,7 +86,7 @@
</plugin>
</plugins>
</build>

<licenses>
<license>
<name>Apache License, Version 2.0</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,12 @@ class CassandraQueryableSource[Q <: DomainQuery](
if(ordering.column.table == ti) {
// if we want to check whether the data is ordered according to query Q we need to make sure that...
// 1. the first clustering key with the particular order given in the query is identical to the columns name
clusteringKeyColumns.head.columnName == ordering.column.columnName ||
val clusteringKeyOrder = if(!clusteringKeyColumns.isEmpty) {
clusteringKeyColumns.head.columnName == ordering.column.columnName
} else {
false
}
clusteringKeyOrder ||
// 2. there isn't any Cassandra-Lucene-Plugin column indexed that can be ordered
autoIndexedColumns.find { colConf => colConf._1 == ordering.column }.isDefined
} else {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package scray.cassandra.example

import scray.cassandra.sync.CassandraImplementation._
import scray.querying.sync.Column
import scray.querying.sync.Column
import scray.querying.sync.Columns
import scray.querying.sync.Table
import scray.querying.sync.RowWithValue
import scray.querying.sync.ColumnWithValue
import scray.querying.sync.RowWithValue

/**
 * Schema definition for the batch-counter example table.
 * The table holds a single row (fixed key "key") whose "count" column
 * is incremented by each completed batch run.
 */
object BatchOutputTable {
  // Counter column written by each batch iteration.
  val count = new Column[Int]("count")

  // Table layout: primary key "key" plus the counter column.
  val columns = new Columns(new Column[String]("key") :: count :: Nil, "(key)", None)
  val table = new Table("\"BDQ_BATCH\"", "\"BatchCountExample\"", columns)

  /**
   * Builds the example row with the counter set to the given value.
   * The key is always the literal "key", so the table holds exactly one row.
   */
  def setCounter(count: Int) =
    new RowWithValue(new ColumnWithValue[String]("key", "key") :: new ColumnWithValue("count", count) :: Nil, "(key)", None)

  // Default row (counter = 1). Reuses setCounter instead of duplicating the
  // row construction inline, so key/partition definitions cannot drift apart.
  val row = setCounter(1)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
package scray.cassandra.example

import scray.cassandra.sync.CassandraJobInfo
import scray.cassandra.sync.OnlineBatchSyncCassandra
import scray.querying.sync.ColumnWithValue
import com.datastax.driver.core.Cluster
import scray.cassandra.sync.CassandraDbSession
import com.datastax.driver.core.querybuilder.QueryBuilder

/**
* Write 100 batch versions in 5 slots.
* Use old data for new calculation.
* newDate = oldDate + 1
*/
/**
 * Example job: write 100 batch versions across 5 slots.
 * Each run reads the counter of the previous completed batch and
 * writes counter + 1 as the new batch's data.
 */
object BatchVersioningMain {

  def main(args: Array[String]): Unit = {

    // Exactly one argument is expected: the Cassandra cluster hostname.
    if (args.length != 1) {
      println("Hostname for cassandra cluster as parameter required")
      // Exit non-zero: a missing argument is an error, not a successful run.
      System.exit(1)
    }

    val table = new OnlineBatchSyncCassandra(args(0))
    val jobInfo = new CassandraJobInfo("ScrayExample", 5)

    // Prepare database: create the sync/output tables for this job.
    table.initJob(jobInfo, BatchOutputTable.table.columns)

    // 1 to 100 inclusive, so 100 batch versions are written as documented
    // (the previous "1 until 100" only ran 99 iterations).
    for (_ <- 1 to 100) {

      table.startNextBatchJob(jobInfo)

      // Read data written by the previous completed batch, if any.
      val lastBatchData = table.getBatchJobData(jobInfo, BatchOutputTable.row)

      // New value = previous counter + 1; defaults to 1 when there is no
      // previous batch. NOTE(review): `.head` assumes a non-empty result
      // whenever getBatchJobData returns Some — confirm against its contract.
      val newCount: Int =
        lastBatchData.flatMap(_.head.getColumn(BatchOutputTable.count).map(_.value)).getOrElse(0) + 1

      // Pacing only: makes successive batch versions distinguishable by time.
      Thread.sleep(5000)

      // Write the incremented counter into the new batch version.
      table.insertInBatchTable(jobInfo, BatchOutputTable.setCounter(newCount))

      // Mark this batch version as complete so the next run can read it.
      table.completeBatchJob(jobInfo)

      println(s"\n Written batch data ${newCount} \n")
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ import scray.cassandra.sync.CassandraJobInfo
import com.twitter.util.FuturePool
import com.datastax.driver.core.{ Row => CassRow }
import scray.querying.source.store.QueryableStoreSource
import scray.querying.description.internal.MaterializedView

/**
* Helper class to create a configuration for a Cassandra table
Expand Down Expand Up @@ -116,21 +117,20 @@ class CassandraExtractor[Q <: DomainQuery](session: Session, table: TableIdentif
/**
* return whether and maybe how the given column is auto-indexed by Cassandra-Lucene-Plugin
*/
private def getColumnCassandraLuceneIndexed(tmOpt: Option[TableMetadata], column: Column,
splitters: Map[Column, Splitter[_]]): Option[AutoIndexConfiguration[_]] = {
val cmOpt = tmOpt.flatMap { tm => Option(tm.getColumn(Metadata.quote(CassandraExtractor.LUCENE_COLUMN_NAME))) }
val schemaOpt = cmOpt.flatMap (cm => Option(cm.getIndex).map(_.getOption(CassandraExtractor.LUCENE_INDEX_SCHEMA_OPTION_NAME)))
private def getColumnCassandraLuceneIndexed(tmOpt: Option[TableMetadata], column: Column, splitters: Map[Column, Splitter[_]]): Option[AutoIndexConfiguration[_]] = {
val idxMetadata = tmOpt.flatMap { tm => Option(tm.getIndex(Metadata.quote(CassandraExtractor.LUCENE_INDEX_NAME(tm.getName)))) }
val schemaOpt = idxMetadata.map { schemaOptions => schemaOptions.getOption(CassandraExtractor.LUCENE_INDEX_SCHEMA_OPTION_NAME) }

schemaOpt.flatMap { schema =>
logger.trace(s"Lucene index schema is: $schema")
val outerMatcher = CassandraExtractor.outerPattern.matcher(schema)
if(outerMatcher.matches()) {
val fieldString = outerMatcher.group(1)
if(CassandraExtractor.innerPattern.split(fieldString, -1).find { _.trim() == column.columnName }.isDefined) {
cmOpt.get.getType
if(splitters.get(column).isDefined) {
logger.debug(s"Found Lucene-indexed column ${column.columnName} for table ${tmOpt.get.getName} with splitting option")
logger.debug(s"Found Lucene-indexed column ${column.columnName} for table ${tmOpt.get.getKeyspace.getName}.${tmOpt.get.getName} with splitting option")
} else {
logger.debug(s"Found Lucene-indexed column ${column.columnName} for table ${tmOpt.get.getName}")
logger.debug(s"Found Lucene-indexed column ${column.columnName} for table ${tmOpt.get.getKeyspace.getName}.${tmOpt.get.getName}")
}
Some(AutoIndexConfiguration[Any](isRangeIndex = true, isFullTextIndex = true, isSorted = true,
rangePartioned = splitters.get(column).map(_.splitter).asInstanceOf[Option[((Any, Any), Boolean) => Iterator[(Any, Any)]]]))
Expand All @@ -154,9 +154,16 @@ class CassandraExtractor[Q <: DomainQuery](session: Session, table: TableIdentif
case _ => None
}
val tm = metadata.flatMap(ksmeta => Option(CassandraUtils.getTableMetadata(tableName, ksmeta)))
val autoIndex = metadata.flatMap{_ =>
val cm = tm.map(_.getColumn(Metadata.quote(column.columnName)))
cm.flatMap(colmeta => Option(colmeta.getIndex()))}.isDefined
val autoIndex = tm.flatMap { tm =>
val idxMethadata = tm.getIndex(Metadata.quote(tm.getName + "_" + column.columnName))
if(idxMethadata == null) {
None
} else {
logger.debug(s"Found index for ${tm.getKeyspace.getName}.${tm.getName}.${column.columnName} ")
Some(true)
}
}.isDefined

val autoIndexConfig = getColumnCassandraLuceneIndexed(tm, column, splitters)
if(autoIndexConfig.isDefined) {
(true, autoIndexConfig)
Expand Down Expand Up @@ -261,8 +268,8 @@ class CassandraExtractor[Q <: DomainQuery](session: Session, table: TableIdentif
val cassSession = new CassandraDbSession(session)
val syncApiInstance = new OnlineBatchSyncCassandra(cassSession)
val jobInfo = new CassandraJobInfo(jobName)
val latestComplete = () => syncApiInstance.getBatchVersion(jobInfo)
val runtimeVersion = () => syncApiInstance.getOnlineVersion(jobInfo)
val latestComplete = () => None // syncApiInstance.getBatchVersion(jobInfo) // FIXME
val runtimeVersion = () => None // syncApiInstance.getOnlineVersion(jobInfo)
// if we have a runtime Version, this will be the table to read from, for now. We
// expect that all data is aggregated with the corresponding complete version.
// TODO: in the future we want this to be able to merge, such that it will be an
Expand Down Expand Up @@ -327,9 +334,7 @@ class CassandraExtractor[Q <: DomainQuery](session: Session, table: TableIdentif
versioningConfig.map(_.queryableStore.get.asInstanceOf[CassandraQueryableSource[Q]].mappingFunction).orNull.asInstanceOf[DomainQuery => Q]
},
cassQuerySource,
cassQuerySource,
// TODO: add materialized views
List()
cassQuerySource
)
}

Expand Down Expand Up @@ -362,7 +367,7 @@ object CassandraExtractor {
// }

val DB_ID: String = "cassandra"
val LUCENE_COLUMN_NAME: String = "lucene"
val LUCENE_INDEX_NAME: String => String = (cfName: String) => s"""${cfName}_lucene_index"""
val LUCENE_INDEX_SCHEMA_OPTION_NAME: String = "schema"

lazy val outerPattern = Pattern.compile("^\\s*\\{\\s*fields\\s*:\\s*\\{(.*)\\s*}\\s*\\}\\s*$", Pattern.DOTALL)
Expand Down
Loading

0 comments on commit 7135618

Please sign in to comment.