index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_getset.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** GETSET key value: atomically stores the new value and replies with the previous one. */
class RO_getset extends AbstractRedisOperation {
    RO_getset(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    /** Reads the old value, overwrites it (no expiry), then returns the old value as a bulk string. */
    Slice response() {
        Slice previous = base().rawGet(params().get(0));
        base().rawPut(params().get(0), params().get(1), -1L);
        return Response.bulkString(previous);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_incr.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.List;
/** INCR key: increments the stored integer by exactly one. */
class RO_incr extends RO_incrby {
    RO_incr(RedisBase base, List<Slice> params) {
        super(base, params, 1);
    }

    /** INCR takes no delta argument; the step is fixed at 1. */
    @Override
    long incrementOrDecrementValue(List<Slice> args) {
        return 1L;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_incrOrDecrBy.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/**
 * Shared machinery for INCR/INCRBY/DECR/DECRBY: adds a subclass-supplied delta
 * to the integer stored at the key (treating a missing key as 0) and replies
 * with the new value.
 */
abstract class RO_incrOrDecrBy extends AbstractRedisOperation {
    RO_incrOrDecrBy(RedisBase base, List<Slice> params, Integer expectedParams) {
        super(base, params, expectedParams, null, null);
    }

    /** The signed step to apply; subclasses decide how it is derived from the params. */
    abstract long incrementOrDecrementValue(List<Slice> params);

    Slice response() {
        Slice key = params().get(0);
        long delta = incrementOrDecrementValue(params());
        Slice stored = base().rawGet(key);
        // A missing key behaves like a stored 0.
        long result = (stored == null)
                ? delta
                : convertToLong(new String(stored.data())) + delta;
        base().rawPut(key, new Slice(String.valueOf(result)), -1L);
        return Response.integer(result);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_incrby.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/**
 * INCRBY key delta: increments the integer stored at key by the given delta.
 * The shared increment/decrement logic lives in RO_incrOrDecrBy.
 */
class RO_incrby extends RO_incrOrDecrBy {
RO_incrby(RedisBase base, List<Slice> params) {
super(base, params, 2);
}
// Alternate arity used by subclasses (e.g. RO_incr) that accept fewer arguments.
RO_incrby(RedisBase base, List<Slice> params, Integer expectedParams) {
super(base, params, expectedParams);
}
// Parses the delta from the second argument.
// NOTE(review): relies on Slice.toString() (via String.valueOf) yielding the raw
// content; sibling commands use new String(slice.data()) instead — confirm the
// two representations are identical.
long incrementOrDecrementValue(List<Slice> params){
return convertToLong(String.valueOf(params.get(1)));
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_keys.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.ArrayList;
import java.util.List;
/** KEYS pattern: replies with every stored key whose name matches the glob pattern. */
class RO_keys extends AbstractRedisOperation {
    RO_keys(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    Slice response() {
        List<Slice> matchingKeys = new ArrayList<>();
        String regex = createRegexFromGlob(new String(params().get(0).data()));
        base().keys().forEach(keyData -> {
            String key = new String(keyData.data());
            if (key.matches(regex)) {
                matchingKeys.add(Response.bulkString(keyData));
            }
        });
        return Response.array(matchingKeys);
    }

    /**
     * Translates a glob ('*' = any run of chars, '?' = any single char) into an
     * anchored Java regex.
     *
     * BUG FIX: previously only '.' and '\' were escaped, so any other regex
     * metacharacter in the pattern leaked through — e.g. glob "a+b" became the
     * regex "^a+b$" (matching "aaab"), and unbalanced '(' or '[' threw
     * PatternSyntaxException. Every non-glob metacharacter is now escaped so it
     * matches literally.
     */
    private static String createRegexFromGlob(String glob) {
        StringBuilder out = new StringBuilder("^");
        for (int i = 0; i < glob.length(); ++i) {
            final char c = glob.charAt(i);
            switch (c) {
                case '*':
                    out.append(".*");
                    break;
                case '?':
                    out.append('.');
                    break;
                default:
                    // Escape all regex metacharacters; escaping other punctuation is harmless.
                    if ("\\.[]{}()<>+-=!^$|".indexOf(c) >= 0) {
                        out.append('\\');
                    }
                    out.append(c);
            }
        }
        out.append('$');
        return out.toString();
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lindex.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToInteger;
import static ai.grakn.redismock.Utils.deserializeObject;
/** LINDEX key index: element at the given position; negative indices count from the tail. */
class RO_lindex extends AbstractRedisOperation {
    RO_lindex(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    /** Replies nil for a missing key or out-of-range index, else the element as a bulk string. */
    Slice response() {
        Slice data = base().rawGet(params().get(0));
        if (data == null) {
            return Response.NULL;
        }
        LinkedList<Slice> list = deserializeObject(data);
        int index = convertToInteger(params().get(1).toString());
        if (index < 0) {
            // Negative index is relative to the end of the list.
            index += list.size();
        }
        if (index < 0 || index >= list.size()) {
            return Response.NULL;
        }
        return Response.bulkString(list.get(index));
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_llen.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.Lists;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.deserializeObject;
/** LLEN key: length of the stored list, or 0 when the key does not exist. */
class RO_llen extends AbstractRedisOperation {
    RO_llen(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    Slice response() {
        Slice data = base().rawGet(params().get(0));
        if (data == null) {
            // Missing key behaves like an empty list.
            return Response.integer(0);
        }
        LinkedList<Slice> list = deserializeObject(data);
        return Response.integer(list.size());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lpop.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
/** LPOP key: removes and returns the head element of the list (shared logic in RO_pop). */
class RO_lpop extends RO_pop {
    RO_lpop(RedisBase base, List<Slice> params) {
        super(base, params);
    }

    /** Takes from the front; RO_pop guarantees the list is non-empty here. */
    @Override
    Slice popper(LinkedList<Slice> list) {
        return list.remove(0);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lpush.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
/** LPUSH key value…: prepends values to the list (shared logic in RO_push). */
class RO_lpush extends RO_push {
    RO_lpush(RedisBase base, List<Slice> params) {
        super(base, params);
    }

    /** Inserts at the head of the list. */
    @Override
    void pusher(LinkedList<Slice> list, Slice slice) {
        list.add(0, slice);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lpushx.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
class RO_lpushx extends RO_lpush {
RO_lpushx(RedisBase base, List<Slice> params) {
super(base, params);
}
Slice response(){
Slice key = params().get(0);
Slice data = base().rawGet(key);
if(data != null){
return super.response();
}
return Response.integer(0);
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lrange.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToInteger;
import static ai.grakn.redismock.Utils.deserializeObject;
/** LRANGE key start stop: inclusive sub-range of the list; negative indices count from the tail. */
class RO_lrange extends AbstractRedisOperation {
    RO_lrange(RedisBase base, List<Slice> params) {
        super(base, params, 3, null, null);
    }

    Slice response() {
        Slice data = base().rawGet(params().get(0));
        LinkedList<Slice> list;
        if (data == null) {
            list = Lists.newLinkedList();
        } else {
            list = deserializeObject(data);
        }
        int from = normalize(convertToInteger(params().get(1).toString()), list.size());
        int to = normalize(convertToInteger(params().get(2).toString()), list.size());
        ImmutableList.Builder<Slice> builder = new ImmutableList.Builder<Slice>();
        for (int i = from; i <= to && i < list.size(); i++) {
            builder.add(Response.bulkString(list.get(i)));
        }
        return Response.array(builder.build());
    }

    /** Maps a possibly-negative index onto the list; anything still negative clamps to 0. */
    private static int normalize(int index, int size) {
        if (index < 0) {
            index += size;
            if (index < 0) {
                index = 0;
            }
        }
        return index;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_lrem.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToInteger;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/**
 * LREM key count element: removes occurrences of element from the list.
 * count &gt; 0 removes from head to tail, count &lt; 0 from tail to head,
 * and count == 0 removes every occurrence. Replies with the number removed.
 */
class RO_lrem extends AbstractRedisOperation {
RO_lrem(RedisBase base, List<Slice> params) {
super(base, params,3, null, null);
}
Slice response(){
Slice key = params().get(0);
int numRemove = convertToInteger(new String(params().get(1).data()));
Slice target = params().get(2);
Slice data = base().rawGet(key);
if(data == null){
// Missing key: nothing to remove.
return Response.integer(0);
}
LinkedList<Slice> list = deserializeObject(data);
//Determine the directionality of the deletions
int numRemoved = 0;
Iterator<Slice> iterator;
if(numRemove < 0){
// Negative count walks the list backwards so the LAST matches are removed first.
iterator = list.descendingIterator();
} else {
iterator = list.iterator();
}
// From here on only the magnitude matters; 0 means "remove all matches".
numRemove = Math.abs(numRemove);
while (iterator.hasNext()){
Slice element = iterator.next();
if(element.equals(target) && (numRemove == 0 || numRemoved < numRemove)){
// Iterator.remove() is the only safe way to delete during iteration.
iterator.remove();
numRemoved++;
}
}
// Persist the shortened list with no expiry.
base().rawPut(key, serializeObject(list), -1L);
return Response.integer(numRemoved);
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_mget.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.ImmutableList;
import java.util.List;
/** MGET key [key…]: one bulk-string reply per key; absent keys yield nil entries. */
class RO_mget extends AbstractRedisOperation {
    RO_mget(RedisBase base, List<Slice> params) {
        super(base, params, null, 0, null);
    }

    Slice response() {
        ImmutableList.Builder<Slice> values = new ImmutableList.Builder<Slice>();
        // bulkString(null) encodes the nil reply for missing keys.
        params().forEach(key -> values.add(Response.bulkString(base().rawGet(key))));
        return Response.array(values.build());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_mset.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** MSET key value [key value…]: stores each key/value pair (no expiry) and replies OK. */
class RO_mset extends AbstractRedisOperation {
    RO_mset(RedisBase base, List<Slice> params) {
        super(base, params, null, 0, 2);
    }

    Slice response() {
        List<Slice> pairs = params();
        // Arguments arrive as a flat key,value,key,value… sequence.
        for (int i = 0; i < pairs.size(); i += 2) {
            Slice key = pairs.get(i);
            Slice value = pairs.get(i + 1);
            base().rawPut(key, value, -1L);
        }
        return Response.OK;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pexpire.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/** PEXPIRE key ms: arms a time-to-live on the key and replies with setTTL's result. */
class RO_pexpire extends AbstractRedisOperation {
    RO_pexpire(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    /** Parses the TTL argument; PEXPIRE uses it as-is (milliseconds). */
    long getValue(List<Slice> args) {
        return convertToLong(new String(args.get(1).data()));
    }

    Slice response() {
        Slice key = params().get(0);
        long ttl = getValue(params());
        return Response.integer(base().setTTL(key, ttl));
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pexpireat.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/** PEXPIREAT key ms-timestamp: sets an absolute expiry deadline on the key. */
class RO_pexpireat extends AbstractRedisOperation {
    RO_pexpireat(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    Slice response() {
        Slice key = params().get(0);
        long deadline = convertToLong(new String(params().get(1).data()));
        return Response.integer(base().setDeadline(key, deadline));
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pfadd.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/**
 * PFADD key element…: mock HyperLogLog backed by an exact Set.
 * Replies 1 when the element set grew, 0 otherwise.
 */
class RO_pfadd extends AbstractRedisOperation {
    RO_pfadd(RedisBase base, List<Slice> params) {
        super(base, params, null, 1, null);
    }

    Slice response() {
        Slice key = params().get(0);
        Slice data = base().rawGet(key);
        final boolean isNewKey = (data == null);
        Set<Slice> elements;
        if (isNewKey) {
            elements = Sets.newHashSet();
        } else {
            elements = deserializeObject(data);
        }
        int sizeBefore = elements.size();
        for (Slice element : params().subList(1, params().size())) {
            elements.add(element);
        }
        // A fresh key gets no expiry (-1); an existing key keeps its current TTL (null).
        Long ttl = isNewKey ? Long.valueOf(-1L) : null;
        base().rawPut(key, serializeObject(elements), ttl);
        return Response.integer(elements.size() == sizeBefore ? 0L : 1L);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pfcount.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
import static ai.grakn.redismock.Utils.deserializeObject;
/** PFCOUNT key [key…]: exact cardinality of the union of the stored element sets. */
class RO_pfcount extends AbstractRedisOperation {
    RO_pfcount(RedisBase base, List<Slice> params) {
        super(base, params, null, 0, null);
    }

    Slice response() {
        Set<Slice> union = Sets.newHashSet();
        for (Slice key : params()) {
            Slice data = base().rawGet(key);
            if (data != null) {
                Set<Slice> elements = deserializeObject(data);
                union.addAll(elements);
            }
        }
        return Response.integer((long) union.size());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pfmerge.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/** PFMERGE dest source…: merges the element sets of the sources into dest; replies OK. */
class RO_pfmerge extends AbstractRedisOperation {
    RO_pfmerge(RedisBase base, List<Slice> params) {
        super(base, params, null, 0, null);
    }

    Slice response() {
        Slice destKey = params().get(0);
        Slice destData = base().rawGet(destKey);
        final boolean isNewKey = (destData == null);
        Set<Slice> merged;
        if (isNewKey) {
            merged = Sets.newHashSet();
        } else {
            merged = deserializeObject(destData);
        }
        for (Slice sourceKey : params().subList(1, params().size())) {
            Slice sourceData = base().rawGet(sourceKey);
            if (sourceData != null) {
                Set<Slice> sourceElements = deserializeObject(sourceData);
                merged.addAll(sourceElements);
            }
        }
        // A fresh destination gets no expiry (-1); an existing one keeps its TTL (null).
        Long ttl = isNewKey ? Long.valueOf(-1L) : null;
        base().rawPut(destKey, serializeObject(merged), ttl);
        return Response.OK;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_ping.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** PING [message]: replies PONG, or echoes the optional message. */
class RO_ping extends AbstractRedisOperation {
    RO_ping(RedisBase base, List<Slice> params) {
        super(base, params, 0, null, null);
    }

    Slice response() {
        return params().isEmpty()
                ? Response.bulkString(new Slice("PONG"))
                : Response.bulkString(params().get(0));
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pop.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/**
 * Shared machinery for LPOP/RPOP: removes one element from the stored list,
 * persists the remainder, and replies with the removed element.
 */
abstract class RO_pop extends AbstractRedisOperation {
    RO_pop(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    /** Removes and returns one element; the subclass decides which end is taken. */
    abstract Slice popper(LinkedList<Slice> list);

    Slice response() {
        Slice key = params().get(0);
        Slice data = base().rawGet(key);
        if (data == null) {
            // Missing key pops nothing.
            return Response.NULL;
        }
        LinkedList<Slice> elements = deserializeObject(data);
        if (elements.isEmpty()) {
            return Response.NULL;
        }
        Slice popped = popper(elements);
        base().rawPut(key, serializeObject(elements), -1L);
        return Response.bulkString(popped);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_psetex.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/** PSETEX key ms value: SETEX variant whose TTL argument is already in milliseconds. */
class RO_psetex extends RO_setex {
    RO_psetex(RedisBase base, List<Slice> params) {
        super(base, params, 3);
    }

    /** No unit conversion needed — the store granularity is milliseconds. */
    @Override
    long valueToSet(List<Slice> args) {
        String millis = new String(args.get(1).data());
        return convertToLong(millis);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_pttl.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
class RO_pttl extends RO_ttl {
RO_pttl(RedisBase base, List<Slice> params) {
super(base, params);
}
Slice finalReturn(Long pttl){
return Response.integer(pttl);
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_publish.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.RedisClient;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
import java.util.Set;
/** PUBLISH channel message: delivers the message to each subscriber; replies with their count. */
class RO_publish extends AbstractRedisOperation {
    RO_publish(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    Slice response() {
        Slice channel = params().get(0);
        Slice message = params().get(1);
        Set<RedisClient> subscribers = base().getSubscribers(channel);
        for (RedisClient subscriber : subscribers) {
            Slice payload = Response.publishedMessage(channel, message);
            subscriber.sendResponse(payload, "contacting subscriber");
        }
        return Response.integer(subscribers.size());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_push.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import ai.grakn.redismock.exception.InternalException;
import com.google.common.collect.Lists;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/**
 * Shared machinery for LPUSH/RPUSH: appends every value argument to the stored
 * list (creating it if absent), persists it, and replies with the new length.
 */
abstract class RO_push extends AbstractRedisOperation {
RO_push(RedisBase base, List<Slice> params) {
super(base, params,null, 1, null);
}
// Inserts one element; the subclass decides head (LPUSH) or tail (RPUSH).
abstract void pusher(LinkedList<Slice> list, Slice slice);
Slice response() {
Slice key = params().get(0);
Slice data = base().rawGet(key);
LinkedList<Slice> list;
if (data != null) {
list = deserializeObject(data);
} else {
// Missing key starts from an empty list.
list = Lists.newLinkedList();
}
// params().get(0) is the key; every remaining argument is a value to push.
for (int i = 1; i < params().size(); i++) {
pusher(list, params().get(i));
}
try {
base().rawPut(key, serializeObject(list), -1L);
} catch (Exception e) {
// NOTE(review): only the message is propagated — the original cause/stack is
// lost. Confirm whether InternalException has a (String, Throwable) ctor.
throw new InternalException(e.getMessage());
}
return Response.integer(list.size());
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_quit.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.RedisClient;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/**
 * QUIT: acknowledges the client with OK, then closes its connection.
 * The send must happen before close(), so the statement order is significant.
 */
class RO_quit extends AbstractRedisOperation {
private final RedisClient client;
RO_quit(RedisBase base, RedisClient client, List<Slice> params) {
super(base, params,0, null, null);
this.client = client;
}
Slice response() {
// Reply first, then tear the connection down.
client.sendResponse(Response.clientResponse("quit", Response.OK), "quit");
client.close();
// SKIP: the response has already been written directly to the client.
return Response.SKIP;
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_rpop.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
/** RPOP key: removes and returns the tail element of the list (shared logic in RO_pop). */
class RO_rpop extends RO_pop {
    RO_rpop(RedisBase base, List<Slice> params) {
        super(base, params);
    }

    /** Takes from the back; RO_pop guarantees the list is non-empty here. */
    @Override
    Slice popper(LinkedList<Slice> list) {
        Slice tail = list.removeLast();
        return tail;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_rpoplpush.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import ai.grakn.redismock.SliceParser;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
class RO_rpoplpush extends AbstractRedisOperation {
RO_rpoplpush(RedisBase base, List<Slice> params) {
super(base, params, 2, null, null);
}
RO_rpoplpush(RedisBase base, List<Slice> params, Integer numExpected) {
super(base, params, numExpected, null, null);
}
Slice response() {
Slice source = params().get(0);
Slice target = params().get(1);
//Pop last one
Slice result = new RO_rpop(base(), Collections.singletonList(source)).execute();
Slice valueToPush = SliceParser.consumeParameter(result.data());
//Push it into the other list
new RO_lpush(base(), Arrays.asList(target, valueToPush)).execute();
return result;
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_rpush.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Slice;
import java.util.LinkedList;
import java.util.List;
/** RPUSH key value…: appends values to the list (shared logic in RO_push). */
class RO_rpush extends RO_push {
    RO_rpush(RedisBase base, List<Slice> params) {
        super(base, params);
    }

    /** Appends at the tail of the list. */
    @Override
    void pusher(LinkedList<Slice> list, Slice slice) {
        list.add(slice);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_sadd.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import ai.grakn.redismock.exception.InternalException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/**
 * SADD key member…: adds members to the set stored at key (creating it if
 * absent) and replies with the TOTAL set size after the additions.
 */
class RO_sadd extends AbstractRedisOperation {
RO_sadd(RedisBase base, List<Slice> params) {
super(base, params,null, 1, null);
}
Slice response() {
Slice key = params().get(0);
Slice data = base().rawGet(key);
Set<Slice> set;
if (data != null) {
set = deserializeObject(data);
} else {
// Missing key starts from an empty set.
set = new HashSet<>();
}
// params().get(0) is the key; every remaining argument is a member to add.
for (int i = 1; i < params().size(); i++) {
set.add(params().get(i));
}
try {
base().rawPut(key, serializeObject(set), -1L);
} catch (Exception e) {
// NOTE(review): only the message is propagated — the original cause/stack is
// lost. Confirm whether InternalException has a (String, Throwable) ctor.
throw new InternalException(e.getMessage());
}
// NOTE(review): real Redis SADD replies with the number of NEW members added,
// not the set's total size — verify which behaviour callers expect here.
return Response.integer(set.size());
}
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_set.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** SET key value: stores the value and replies OK. */
class RO_set extends AbstractRedisOperation {
    RO_set(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    public RO_set(RedisBase base, List<Slice> params, Integer i) {
        super(base, params, i, null, null);
    }

    /** Expiry stored alongside the value; plain SET never expires (-1). Overridden by RO_setex. */
    long valueToSet(List<Slice> params) {
        return -1L;
    }

    Slice response() {
        Slice key = params().get(0);
        Slice value = params().get(1);
        base().rawPut(key, value, valueToSet(params()));
        return Response.OK;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_setbit.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.Arrays;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToByte;
import static ai.grakn.redismock.Utils.convertToNonNegativeInteger;
/**
 * SETBIT key offset bit: sets or clears a single bit of the stored value,
 * growing it with zero bytes as needed, and replies with the bit's previous
 * value (0 for freshly created or extended regions).
 *
 * BUG FIX: the old in-place update did
 *     data[i] |= (1 << k); data[i] &= (bit << k);
 * The AND mask has at most one bit set, so every OTHER bit of the touched byte
 * was wiped, corrupting existing data. The update now clears only the target
 * bit and ORs in the requested value.
 */
class RO_setbit extends AbstractRedisOperation {
    RO_setbit(RedisBase base, List<Slice> params) {
        super(base, params, 3, null, null);
    }

    Slice response() {
        Slice key = params().get(0);
        int pos = convertToNonNegativeInteger(params().get(1).toString());
        // Assumes convertToByte validates bit to 0 or 1 — TODO confirm.
        byte bit = convertToByte(params().get(2).toString());
        int byteIndex = pos / 8;
        int mask = 1 << (pos % 8);

        Slice value = base().rawGet(key);
        if (value == null) {
            // No existing value: allocate a zeroed buffer just big enough.
            byte[] data = new byte[byteIndex + 1];
            data[byteIndex] = (byte) (bit << (pos % 8));
            base().rawPut(key, new Slice(data), -1L);
            return Response.integer(0L);
        }

        long original;
        byte[] data;
        if (byteIndex >= value.length()) {
            // Grow the buffer; the new trailing bytes are zero, so the prior bit was 0.
            data = Arrays.copyOf(value.data(), byteIndex + 1);
            original = 0;
        } else {
            data = value.data();
            original = (data[byteIndex] & mask) != 0 ? 1 : 0;
        }
        // Clear the target bit, then set it if requested — all other bits preserved.
        data[byteIndex] = (byte) ((data[byteIndex] & ~mask) | (bit << (pos % 8)));
        base().rawPut(key, new Slice(data), -1L);
        return Response.integer(original);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_setex.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
import static ai.grakn.redismock.Utils.convertToLong;
/** SETEX key seconds value: stores the value with a TTL given in seconds. */
class RO_setex extends RO_set {
    RO_setex(RedisBase base, List<Slice> params) {
        super(base, params, 3);
    }

    RO_setex(RedisBase base, List<Slice> params, Integer expectedParams) {
        super(base, params, expectedParams);
    }

    /** SETEX's TTL is in seconds; convert to the millisecond store granularity. */
    @Override
    long valueToSet(List<Slice> params){
        return convertToLong(new String(params.get(1).data())) * 1000;
    }

    // FIX: @Override was missing here although valueToSet above carries it; the
    // annotation lets the compiler verify the signature against RO_set.response().
    @Override
    Slice response() {
        // SETEX key seconds value — the value is the THIRD argument, unlike plain SET.
        base().rawPut(params().get(0), params().get(2), valueToSet(params()));
        return Response.OK;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_setnx.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** SETNX key value: stores the value only when the key is absent; replies 1 if stored, 0 otherwise. */
class RO_setnx extends AbstractRedisOperation {
    RO_setnx(RedisBase base, List<Slice> params) {
        super(base, params, 2, null, null);
    }

    Slice response() {
        Slice key = params().get(0);
        boolean absent = base().rawGet(key) == null;
        if (absent) {
            base().rawPut(key, params().get(1), -1L);
        }
        return Response.integer(absent ? 1 : 0);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_smembers.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.LinkedList;
import java.util.List;
import static ai.grakn.redismock.Utils.deserializeObject;
/** SMEMBERS key: every member of the set as an array of bulk strings; empty array for a missing key. */
class RO_smembers extends AbstractRedisOperation {
    RO_smembers(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    Slice response() {
        Slice data = base().rawGet(params().get(0));
        ImmutableList.Builder<Slice> members = new ImmutableList.Builder<Slice>();
        if (data != null) {
            // Copied into a list because Jedis can only deserialize lists.
            LinkedList<Slice> elements = new LinkedList<>(deserializeObject(data));
            for (Slice element : elements) {
                members.add(Response.bulkString(element));
            }
        }
        return Response.array(members.build());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_spop.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import static ai.grakn.redismock.Utils.deserializeObject;
import static ai.grakn.redismock.Utils.serializeObject;
/** SPOP key: removes and returns one member of the set; nil for a missing or empty set. */
class RO_spop extends AbstractRedisOperation {
    RO_spop(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    Slice response() {
        Slice key = params().get(0);
        Slice data = base().rawGet(key);
        if (data == null) {
            return Response.NULL;
        }
        Set<Slice> members = deserializeObject(data);
        if (members.isEmpty()) {
            return Response.NULL;
        }
        // Take whichever member the set's iterator yields first.
        Iterator<Slice> it = members.iterator();
        Slice removed = it.next();
        it.remove();
        base().rawPut(key, serializeObject(members), -1L);
        return Response.bulkString(removed);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_strlen.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** STRLEN key: byte length of the stored value, or 0 when the key is absent. */
class RO_strlen extends AbstractRedisOperation {
    RO_strlen(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    Slice response() {
        Slice stored = base().rawGet(params().get(0));
        return (stored == null)
                ? Response.integer(0)
                : Response.integer(stored.length());
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_subscribe.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.RedisClient;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
/** SUBSCRIBE channel…: registers this client on each channel and confirms its subscriptions. */
class RO_subscribe extends AbstractRedisOperation {
    private final RedisClient client;

    RO_subscribe(RedisBase base, RedisClient client, List<Slice> params) {
        super(base, params, null, 0, null);
        this.client = client;
    }

    Slice response() {
        for (Slice channel : params()) {
            base().addSubscriber(channel, client);
        }
        List<Slice> subscriptions = base().getSubscriptions(client);
        return Response.subscribedToChannel(subscriptions);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_ttl.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import java.util.List;
class RO_ttl extends AbstractRedisOperation {
    RO_ttl(RedisBase base, List<Slice> params) {
        super(base, params, 1, null, null);
    }

    /** Converts a millisecond TTL to seconds, rounding up (ceiling division). */
    Slice finalReturn(Long pttl) {
        return Response.integer((pttl + 999) / 1000);
    }

    /**
     * TTL: -2 when the key does not exist, -1 when it has no expiry,
     * otherwise the remaining time converted by {@link #finalReturn(Long)}.
     */
    Slice response() {
        Long remainingMs = base().getTTL(params().get(0));
        if (remainingMs == null) {
            return Response.integer(-2L);
        }
        return remainingMs == -1 ? Response.integer(-1L) : finalReturn(remainingMs);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RO_unsubscribe.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.RedisClient;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import org.slf4j.LoggerFactory;
import java.util.List;
class RO_unsubscribe extends AbstractRedisOperation {
    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(RO_unsubscribe.class);
    private final RedisClient client;

    RO_unsubscribe(RedisBase base, RedisClient client, List<Slice> params) {
        super(base, params, null, null, null);
        this.client = client;
    }

    /**
     * UNSUBSCRIBE: removes this client from the given channels, or from all of its
     * current subscriptions when no channel is specified. A separate "unsubscribe"
     * confirmation is sent to the client for each channel actually removed, so the
     * operation itself returns {@code Response.SKIP}.
     */
    Slice response() {
        // Fixed typo: was "channelsToUbsubscribeFrom".
        List<Slice> channelsToUnsubscribeFrom;
        if (params().isEmpty()) {
            LOG.debug("No channels specified therefore unsubscribing from all channels");
            channelsToUnsubscribeFrom = base().getSubscriptions(client);
        } else {
            channelsToUnsubscribeFrom = params();
        }
        for (Slice channel : channelsToUnsubscribeFrom) {
            // Parameterized logging instead of string concatenation (avoids building
            // the message when debug is disabled).
            LOG.debug("Unsubscribing from channel [{}]", channel);
            if (base().removeSubscriber(channel, client)) {
                int numSubscriptions = base().getSubscriptions(client).size();
                Slice response = Response.unsubscribe(channel, numSubscriptions);
                client.sendResponse(Response.clientResponse("unsubscribe", response), "unsubscribe");
            }
        }
        // Skip is sent because we have already responded
        return Response.SKIP;
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RedisOperation.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.Slice;
/**
* Represents a Redis Operation which can be executed against {@link ai.grakn.redismock.RedisBase}
*/
public interface RedisOperation {
    /**
     * Runs the operation against the backing store and returns the Redis-protocol
     * encoded result as a {@link Slice}.
     */
    Slice execute();
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/commands/RedisOperationExecutor.java
|
package ai.grakn.redismock.commands;
import ai.grakn.redismock.RedisBase;
import ai.grakn.redismock.RedisClient;
import ai.grakn.redismock.RedisCommand;
import ai.grakn.redismock.Response;
import ai.grakn.redismock.Slice;
import ai.grakn.redismock.exception.WrongNumberOfArgumentsException;
import ai.grakn.redismock.exception.WrongValueTypeException;
import com.google.common.base.Preconditions;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Xiaolu on 2015/4/20.
*/
/**
 * Parses incoming {@link RedisCommand}s, maps the command name to a concrete
 * {@link RedisOperation}, and executes it — either immediately or, while a MULTI
 * transaction is open, by queueing it for a later EXEC.
 *
 * Created by Xiaolu on 2015/4/20.
 */
public class RedisOperationExecutor {
    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(RedisOperationExecutor.class);
    private final RedisClient owner;
    private final RedisBase base;
    /** True between MULTI and EXEC; commands are queued instead of executed. */
    private boolean transactionModeOn;
    /** Operations queued since the last MULTI. */
    private List<RedisOperation> transaction;

    public RedisOperationExecutor(RedisBase base, RedisClient owner) {
        this.base = base;
        this.owner = owner;
        transactionModeOn = false;
        transaction = new ArrayList<>();
    }

    /**
     * Maps a lowercase command name to its operation.
     *
     * @throws UnsupportedOperationException for commands this mock does not implement
     */
    private RedisOperation buildSimpleOperation(String name, List<Slice> params) {
        switch (name) {
            case "set":
                return new RO_set(base, params);
            case "setex":
                return new RO_setex(base, params);
            case "psetex":
                return new RO_psetex(base, params);
            case "setnx":
                return new RO_setnx(base, params);
            case "setbit":
                return new RO_setbit(base, params);
            case "append":
                return new RO_append(base, params);
            case "get":
                return new RO_get(base, params);
            case "getbit":
                return new RO_getbit(base, params);
            case "ttl":
                return new RO_ttl(base, params);
            case "pttl":
                return new RO_pttl(base, params);
            case "expire":
                return new RO_expire(base, params);
            case "pexpire":
                return new RO_pexpire(base, params);
            case "incr":
                return new RO_incr(base, params);
            case "incrby":
                return new RO_incrby(base, params);
            case "decr":
                return new RO_decr(base, params);
            case "decrby":
                return new RO_decrby(base, params);
            case "pfcount":
                return new RO_pfcount(base, params);
            case "pfadd":
                return new RO_pfadd(base, params);
            case "pfmerge":
                return new RO_pfmerge(base, params);
            case "mget":
                return new RO_mget(base, params);
            case "mset":
                return new RO_mset(base, params);
            case "getset":
                return new RO_getset(base, params);
            case "strlen":
                return new RO_strlen(base, params);
            case "del":
                return new RO_del(base, params);
            case "exists":
                return new RO_exists(base, params);
            case "expireat":
                return new RO_expireat(base, params);
            case "pexpireat":
                return new RO_pexpireat(base, params);
            case "lpush":
                return new RO_lpush(base, params);
            case "rpush":
                return new RO_rpush(base, params);
            case "lpushx":
                return new RO_lpushx(base, params);
            case "lrange":
                return new RO_lrange(base, params);
            case "llen":
                return new RO_llen(base, params);
            case "lpop":
                return new RO_lpop(base, params);
            case "rpop":
                return new RO_rpop(base, params);
            case "lindex":
                return new RO_lindex(base, params);
            case "rpoplpush":
                return new RO_rpoplpush(base, params);
            case "brpoplpush":
                return new RO_brpoplpush(base, params);
            case "subscribe":
                return new RO_subscribe(base, owner, params);
            case "unsubscribe":
                return new RO_unsubscribe(base, owner, params);
            case "publish":
                return new RO_publish(base, params);
            case "flushall":
                return new RO_flushall(base, params);
            case "lrem":
                return new RO_lrem(base, params);
            case "quit":
                return new RO_quit(base, owner, params);
            case "exec":
                // Leaving transaction mode; RO_exec keeps the reference to the
                // queued operations and will run them.
                transactionModeOn = false;
                return new RO_exec(base, transaction, params);
            case "ping":
                return new RO_ping(base, params);
            case "keys":
                return new RO_keys(base, params);
            case "sadd":
                return new RO_sadd(base, params);
            case "smembers":
                return new RO_smembers(base, params);
            case "spop":
                return new RO_spop(base, params);
            default:
                throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", name));
        }
    }

    /**
     * Executes a single client command, honoring transaction mode.
     *
     * @return the encoded response for the client
     */
    public synchronized Slice execCommand(RedisCommand command) {
        Preconditions.checkArgument(!command.getParameters().isEmpty());
        List<Slice> params = command.getParameters();
        List<Slice> commandParams = params.subList(1, params.size());
        String name = new String(params.get(0).data()).toLowerCase();
        try {
            //Transaction handling
            if (name.equals("multi")) {
                newTransaction();
                return Response.clientResponse(name, Response.OK);
            }
            //Checking if we mutating the transaction or the base
            RedisOperation redisOperation = buildSimpleOperation(name, commandParams);
            if (transactionModeOn) {
                transaction.add(redisOperation);
            } else {
                return Response.clientResponse(name, redisOperation.execute());
            }
            // Queued commands are acknowledged with OK (real Redis replies QUEUED).
            return Response.clientResponse(name, Response.OK);
        } catch (UnsupportedOperationException | WrongValueTypeException e) {
            LOG.error("Malformed request", e);
            return Response.error(e.getMessage());
        } catch (WrongNumberOfArgumentsException e) {
            LOG.error("Malformed request", e);
            return Response.error(String.format("ERR wrong number of arguments for '%s' command", name));
        }
    }

    private void newTransaction() {
        if (transactionModeOn) throw new RuntimeException("Redis mock does not support more than one transaction");
        transactionModeOn = true;
        // BUGFIX: start with a fresh list. Previously the old list was reused, so a
        // MULTI issued after a completed EXEC replayed the previously queued
        // operations (RO_exec holds a reference to the old list).
        transaction = new ArrayList<>();
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/exception/EOFException.java
|
package ai.grakn.redismock.exception;
/**
* Created by Xiaolu on 2015/4/21.
*/
/**
 * Thrown when the end of a client's input stream is reached while parsing a command.
 *
 * Created by Xiaolu on 2015/4/21.
 */
public class EOFException extends Exception {
    public EOFException() {
        super();
    }

    /**
     * @param message context describing where the stream ended
     */
    public EOFException(String message) {
        super(message);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/exception/InternalException.java
|
package ai.grakn.redismock.exception;
/**
* Created by Xiaolu on 2015/4/22.
*/
/**
 * Signals an unexpected internal failure of the mock server.
 *
 * Created by Xiaolu on 2015/4/22.
 */
public class InternalException extends RuntimeException {
    public InternalException() {
        super();
    }

    public InternalException(String message) {
        super(message);
    }

    /**
     * Preserves the underlying cause instead of discarding it.
     *
     * @param message context for the failure
     * @param cause   the originating exception
     */
    public InternalException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/exception/ParseErrorException.java
|
package ai.grakn.redismock.exception;
/**
* Created by Xiaolu on 2015/4/20.
*/
/**
 * Thrown when client input cannot be parsed as a valid Redis protocol message.
 *
 * Created by Xiaolu on 2015/4/20.
 */
public class ParseErrorException extends RuntimeException {
    public ParseErrorException() {
        super();
    }

    /**
     * @param message description of the malformed input
     */
    public ParseErrorException(String message) {
        super(message);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/exception/WrongNumberOfArgumentsException.java
|
package ai.grakn.redismock.exception;
/**
* Created by Xiaolu on 2015/4/21.
*/
/**
 * Thrown when a command is invoked with an arity its implementation does not accept.
 *
 * Created by Xiaolu on 2015/4/21.
 */
public class WrongNumberOfArgumentsException extends RuntimeException {
    public WrongNumberOfArgumentsException() {
        super();
    }

    /**
     * @param message details about the expected vs. actual argument count
     */
    public WrongNumberOfArgumentsException(String message) {
        super(message);
    }
}
|
0
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock
|
java-sources/ai/grakn/redis-mock/0.1.6/ai/grakn/redismock/exception/WrongValueTypeException.java
|
package ai.grakn.redismock.exception;
/**
* Created by Xiaolu on 2015/4/22.
*/
/**
 * Thrown when an operation is applied to a key holding a value of an incompatible
 * type (the mock's analogue of Redis' WRONGTYPE error).
 *
 * Created by Xiaolu on 2015/4/22.
 */
public class WrongValueTypeException extends RuntimeException {
    public WrongValueTypeException() {
        super();
    }

    public WrongValueTypeException(String message) {
        super(message);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/Document.java
|
package ai.grakn.redisq;
/**
* Document stored in the queue. It's required to have an Id.
*/
/**
 * A document stored in the queue. Every document must expose a stable identifier,
 * which the queue uses for its Redis keys (content, state, and lock).
 */
public interface Document {
    /**
     * Returns the identifier of this document.
     *
     * @return the id of the document as a string
     */
    String getIdAsString();
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/ExtendedStateInfo.java
|
package ai.grakn.redisq;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * A {@link StateInfo} paired with the id it belongs to. Serialized/deserialized
 * with Jackson, hence the no-arg constructor and field annotations.
 */
public class ExtendedStateInfo {
    @JsonProperty
    private String id;
    @JsonProperty
    private StateInfo stateInfo;

    // Required by Jackson
    public ExtendedStateInfo() {}

    public ExtendedStateInfo(String id, StateInfo stateInfo) {
        this.id = id;
        this.stateInfo = stateInfo;
    }

    public String getId() {
        return id;
    }

    public StateInfo getStateInfo() {
        return stateInfo;
    }

    @Override
    public String toString() {
        return "ExtendedStateInfo{" +
                "id='" + id + '\'' +
                ", stateInfo=" + stateInfo +
                '}';
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/Queue.java
|
package ai.grakn.redisq;
import ai.grakn.redisq.exceptions.StateFutureInitializationException;
import ai.grakn.redisq.exceptions.WaitException;
import ai.grakn.redisq.consumer.QueueConsumer;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import redis.clients.jedis.Jedis;
import redis.clients.util.Pool;
/**
 * A queue of documents backed by Redis. Producers {@link #push(Object)} documents;
 * a consumer started with {@link #startConsumer()} processes them, tracking each
 * document's {@link State} so callers can wait for completion.
 *
 * @param <T> the document type held by the queue
 */
public interface Queue<T> {
    /**
     * Put a document in the queue
     * @param document Document to be pushed to the queue. It must be serialisable.
     */
    void push(T document);

    /**
     * Same as push but it waits for the state of the document to be DONE i.e. the consumer successfully completed
     * working on it.
     *
     * @param document Document to be pushed to the queue. It must be serialisable.
     * @param waitTimeout Timeout for the wait. A WaitException is thrown when expired
     * @param waitTimeoutUnit Unit for the timeout
     * @throws WaitException Thrown if a timeout occurs while waiting for the consumer to be acknowledged in Redis or if the waitTimeout expires
     */
    void pushAndWait(T document, long waitTimeout, TimeUnit waitTimeoutUnit) throws WaitException;

    /**
     * It returns a future that waits for a document to reach a certain state.
     * Note that this works for DONE and FAILED since they are terminal states.
     * @param state Desired states to wait for
     * @param id Id of the document we are watching
     * @return A future that blocks on the state being equal to the given state
     * @throws StateFutureInitializationException Thrown if it fails to subscribe to the state
     */
    Future<Void> getFutureForDocumentStateWait(Set<State> state, String id) throws StateFutureInitializationException;

    /**
     * @see ai.grakn.redisq.Queue#getFutureForDocumentStateWait(Set, String)
     * Also takes a jedis pool
     * @param timeout How long to wait until failing the subscription
     * @param unit Unit of the timeout
     */
    Future<Void> getFutureForDocumentStateWait(Set<State> state, String id, long timeout, TimeUnit unit, Pool<Jedis> pool) throws StateFutureInitializationException;

    /**
     * Starts the consumer for this queue. The consumer takes care of the whole lifecycle, so e.g. in the Redisq
     * implementation this includes a thread that consumes the elements in the queue and a thread
     * that makes sure there are no dead jobs in the inflight queue.
     */
    void startConsumer();

    /**
     * Sets a flag and waits for all the running threads to terminate
     * @throws InterruptedException if the operation is interrupted
     */
    void close() throws InterruptedException;

    /**
     * Getter for the name of the queue
     * @return The name of the queue
     */
    String getName();

    /**
     * Sets a state with no extra info.
     * @see ai.grakn.redisq.Queue#setState(String, State, String)
     */
    default void setState(String id, State state) {
        setState(id, state, "");
    }

    /**
     * Sets a state "manually"
     * @param id Id of the document
     * @param state State (e.g. DONE, FAILED)
     * @param info Extra info to be stored (e.g. the exception message for the failure)
     */
    void setState(String id, State state, String info);

    /**
     * Retrieves the state for the given id
     * @param id Id of the document
     * @return Current state of the document. Empty if not available
     */
    Optional<StateInfo> getState(String id);

    /**
     * Retrieves all the states. It gives a snapshot of the keys at the time the method is called.
     * If the returned state is empty it means it's not available any longer (the key was
     * available when first invoked though).
     * @return A stream with all the states currently stored. The stream is null if finished.
     */
    Stream<Optional<ExtendedStateInfo>> getStates();

    /**
     * Getter for the consumer being used
     * @return The consumer
     */
    QueueConsumer<T> getConsumer();
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/Redisq.java
|
package ai.grakn.redisq;
import static ai.grakn.redisq.State.DONE;
import static ai.grakn.redisq.State.FAILED;
import static ai.grakn.redisq.State.NEW;
import static ai.grakn.redisq.State.PROCESSING;
import ai.grakn.redisq.consumer.Mapper;
import ai.grakn.redisq.consumer.QueueConsumer;
import ai.grakn.redisq.consumer.RedisqConsumer;
import ai.grakn.redisq.consumer.TimedWrap;
import ai.grakn.redisq.exceptions.DeserializationException;
import ai.grakn.redisq.exceptions.RedisqException;
import ai.grakn.redisq.exceptions.SerializationException;
import ai.grakn.redisq.exceptions.StateFutureInitializationException;
import ai.grakn.redisq.exceptions.WaitException;
import ai.grakn.redisq.util.Names;
import com.codahale.metrics.CachedGauge;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import static com.codahale.metrics.MetricRegistry.name;
import com.codahale.metrics.Timer;
import com.codahale.metrics.Timer.Context;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.rholder.retry.RetryException;
import com.github.rholder.retry.Retryer;
import com.github.rholder.retry.RetryerBuilder;
import com.github.rholder.retry.StopStrategies;
import com.github.rholder.retry.WaitStrategies;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Transaction;
import redis.clients.util.Pool;
/**
 * Redis-backed implementation of {@link Queue}. A push stores the serialized
 * document under a content key, its state under a state key, a lock key, and the
 * document id on the main list; the consumer loop BRPOPLPUSHes ids into an
 * in-flight list while processing, and a second loop restores expired in-flight
 * jobs back onto the main queue.
 *
 * @param <T> the document type; must provide a stable id
 */
public class Redisq<T extends Document> implements Queue<T> {

    static final Mapper<StateInfo> stateMapper = new Mapper<>(StateInfo.class);
    private static final Logger LOG = LoggerFactory.getLogger(Redisq.class);
    // Retries until no worker threads remain, polling every 100ms for up to 10s.
    private static final Retryer<Integer> CLOSE_RETRIER = RetryerBuilder.<Integer>newBuilder()
            .retryIfResult(r -> !(r != null && r == 0))
            .withWaitStrategy(WaitStrategies.fixedWait(100, TimeUnit.MILLISECONDS))
            .withStopStrategy(StopStrategies.stopAfterDelay(10, TimeUnit.SECONDS))
            .build();
    private static final int DEFAULT_SUBSCRIPTION_WAIT_TIMEOUT_SECONDS = 30;
    private static final int MARGIN_MS = 60_000;

    private final String queueName;
    private final String inFlightQueueName;
    private final String name;
    private final Duration timeout;
    private final Mapper<TimedWrap<T>> mapper;
    private final Names names;
    private final int lockTime;
    private final Pool<Jedis> jedisPool;
    // TTL (seconds) applied to content and state keys.
    private final int ttlStateInfo;
    private final ExecutorService threadPool;
    private final AtomicBoolean working = new AtomicBoolean(false);
    private final AtomicInteger runningThreads = new AtomicInteger(0);
    private final Duration discardTime;
    private final MetricRegistry metricRegistry;
    private QueueConsumer<T> subscription;
    private Future<?> mainLoop;
    private Future<?> inFlightLoop;
    private final Timer restoreBlockedTimer;
    private final Timer idleTimer;
    private final Timer pushTimer;
    private final Timer executeWaitTimer;
    private final Meter serializationErrors;

    public Redisq(String name, Duration timeout, Duration ttlStateInfo, Duration lockTime,
            Duration discardTime, Consumer<T> consumer, Class<T> klass, Pool<Jedis> jedisPool, ExecutorService threadPool,
            MetricRegistry metricRegistry) {
        // The state TTL must outlive the lock by a margin, otherwise the state key
        // could expire while the document is still legitimately locked.
        Preconditions.checkState(ttlStateInfo.minus(lockTime).toMillis() > MARGIN_MS,
                "The ttl for a state has to be higher than the time a document is locked for by "
                        + MARGIN_MS + "ms");
        this.name = name;
        this.timeout = timeout;
        this.ttlStateInfo = (int) ttlStateInfo.getSeconds();
        this.lockTime = (int) lockTime.getSeconds();
        this.discardTime = discardTime;
        this.metricRegistry = metricRegistry;
        this.subscription = new RedisqConsumer<>(consumer, jedisPool, this);
        this.names = new Names();
        this.queueName = names.queueNameFor(name);
        this.inFlightQueueName = names.inFlightQueueNameFor(name);
        this.jedisPool = jedisPool;
        this.threadPool = threadPool;
        this.mapper = new Mapper<>(new ObjectMapper().getTypeFactory()
                .constructParametricType(TimedWrap.class, klass));
        this.pushTimer = metricRegistry.timer(name(this.getClass(), "push"));
        this.idleTimer = metricRegistry.timer(name(this.getClass(), "idle"));
        metricRegistry.register(name(this.getClass(), "queue", "size"),
                new CachedGauge<Long>(15, TimeUnit.SECONDS) {
                    @Override
                    protected Long loadValue() {
                        try (Jedis jedis = jedisPool.getResource()) {
                            return jedis.llen(queueName);
                        }
                    }
                });
        this.restoreBlockedTimer = metricRegistry.timer(name(this.getClass(), "restore_blocked"));
        this.executeWaitTimer = metricRegistry.timer(name(this.getClass(), "execute_wait"));
        this.serializationErrors = metricRegistry
                .meter(name(this.getClass(), "serialization_errors"));
    }

    /**
     * Serializes the document and atomically writes lock, queue entry, content,
     * and NEW state, then publishes the state change.
     */
    @Override
    public void push(T document) {
        long timestampMs = System.currentTimeMillis();
        String serialized;
        String stateSerialized;
        try {
            serialized = mapper.serialize(new TimedWrap<>(document, timestampMs));
            stateSerialized = stateMapper.serialize(new StateInfo(NEW, timestampMs, ""));
        } catch (SerializationException e) {
            serializationErrors.mark();
            throw new RedisqException("Could not serialize element " + document.getIdAsString(), e);
        }
        LOG.debug("Jedis active: {}, idle: {}", jedisPool.getNumActive(), jedisPool.getNumIdle());
        try (Jedis jedis = jedisPool.getResource(); Timer.Context ignored = pushTimer.time()) {
            Transaction transaction = jedis.multi();
            String id = document.getIdAsString();
            String lockId = names.lockKeyFromId(id);
            transaction.setex(lockId, lockTime, "locked");
            transaction.lpush(queueName, id);
            transaction.setex(names.contentKeyFromId(id), ttlStateInfo, serialized);
            transaction.setex(names.stateKeyFromId(id), ttlStateInfo, stateSerialized);
            transaction.publish(names.stateChannelKeyFromId(id), stateSerialized);
            transaction.exec();
            LOG.debug("Pushed {} with lockTime {}s lock id: {}", id, lockTime, lockId);
        }
    }

    @Override
    public void startConsumer() {
        LOG.debug("Starting consumer {}", name);
        working.set(true);
        // NOTE(review): these two single-thread executors are never shut down
        // explicitly; close() only waits on the submitted futures — confirm this
        // does not leak threads across repeated start/close cycles.
        mainLoop = Executors.newSingleThreadExecutor().submit(() -> {
            // We keep one resource for the iteration
            while (working.get()) {
                iteration();
            }
        });
        inFlightLoop = Executors.newSingleThreadExecutor().submit(() -> {
            while (working.get()) {
                inflightIteration();
                try {
                    TimeUnit.MILLISECONDS.sleep(5000);
                } catch (InterruptedException e) {
                    LOG.warn("Inflight sleep interrupted", e);
                }
            }
        });
    }

    @Override
    public Future<Void> getFutureForDocumentStateWait(Set<State> state, String id)
            throws StateFutureInitializationException {
        return new StateFuture(state, id, jedisPool, DEFAULT_SUBSCRIPTION_WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS, metricRegistry);
    }

    @Override
    public Future<Void> getFutureForDocumentStateWait(Set<State> state, String id, long timeout,
            TimeUnit unit, Pool<Jedis> pool) throws StateFutureInitializationException {
        return new StateFuture(state, id, pool, timeout, unit, metricRegistry);
    }

    /**
     * Scans the in-flight list and moves back to the main queue any document whose
     * lock has expired while still marked PROCESSING (i.e. a dead consumer).
     */
    private void inflightIteration() {
        List<String> processingElements;
        try (Jedis jedis = jedisPool.getResource()) {
            processingElements = jedis.lrange(inFlightQueueName, 0, -1);
        }
        LOG.debug("Found {} documents in flight", processingElements.size());
        processingElements
                .forEach(id -> {
                    try (Jedis jedis = jedisPool.getResource()) {
                        String lockId = names.lockKeyFromId(id);
                        // TODO We might get more than one consumer doing this
                        Long ttl = jedis.ttl(lockId);
                        LOG.debug("Id {} has {} ttl", id, ttl);
                        // ttl == 0: about to expire; ttl == -2: key already gone.
                        if (ttl == 0 || ttl == -2) {
                            Optional<StateInfo> state = getState(id);
                            if (state.isPresent()) {
                                if (state.get().getState().equals(PROCESSING)) {
                                    LOG.trace("Found unlocked element {}, lockId({}), ttl={}", id,
                                            lockId, ttl);
                                    try (Context ignored = restoreBlockedTimer.time()) {
                                        // Restore it in the main queue
                                        Transaction multi = jedis.multi();
                                        multi.lrem(inFlightQueueName, 1, id);
                                        multi.lpush(queueName, id);
                                        multi.exec();
                                    }
                                } else {
                                    // Terminal (or NEW) state: just drop it from inflight.
                                    jedis.lrem(inFlightQueueName, 1, id);
                                }
                            } else {
                                LOG.warn("Found expired document in inflight but no state info found for {}", id);
                            }
                        }
                    }
                });
    }

    /**
     * One consumer iteration: pop an id into the in-flight list, lock it, mark it
     * PROCESSING, fetch its content, and hand it to the thread pool unless it is
     * older than the discard time.
     */
    private void iteration() {
        long timestampMs = System.currentTimeMillis();
        String value;
        String key;
        try (Jedis jedis = jedisPool.getResource()) {
            String id;
            try (Context ignored = idleTimer.time()) {
                id = jedis.brpoplpush(queueName, inFlightQueueName, (int) timeout.getSeconds());
            }
            // If something goes wrong after this, the job will be stuck in inflightIteration
            if (id != null) {
                key = names.contentKeyFromId(id);
                value = lockAndGetDocument(timestampMs, key, jedis, id);
            } else {
                LOG.debug("Empty queue");
                return;
            }
        }
        if (value != null && key != null) {
            TimedWrap<T> element;
            try {
                element = mapper.deserialize(value);
            } catch (DeserializationException e) {
                LOG.error("Failed deserialization, skipping element: {}", value, e);
                return;
            }
            try {
                // Only execute documents younger than discardTime.
                // NOTE(review): a discarded document keeps its PROCESSING state and
                // stays in the in-flight list, so inflightIteration will re-queue it
                // once the lock expires — confirm whether discard should finalize it.
                if (Duration.ofMillis(timestampMs - element.getTimestampMs())
                        .compareTo(discardTime) < 0) {
                    try (Context ignored = executeWaitTimer.time()) {
                        execute(element);
                    }
                }
            } catch (RejectedExecutionException e) {
                processRejected(element, e);
            }
        }
    }

    /** Refreshes the lock, marks the document PROCESSING, and returns its content. */
    private String lockAndGetDocument(long timestampMs, String key, Jedis jedis, String id) {
        String value;
        LOG.debug("Found id {}", id);
        jedis.setex(names.lockKeyFromId(id), lockTime, "locked");
        Optional<StateInfo> state = getState(id);
        if (state.isPresent() && !state.get().getState().equals(NEW)) {
            LOG.warn("State already present for {}: {}", id, state.get().getState());
        }
        setState(jedis, timestampMs, id, PROCESSING, "");
        value = jedis.get(key);
        return value;
    }

    /**
     * Re-enqueues a document whose execution was rejected by the thread pool.
     *
     * BUGFIX: the previous implementation pushed the serialized content onto the
     * *content key* (a plain string written with SETEX), which is not a list and
     * would fail with WRONGTYPE; it also logged the original rejection instead of
     * the re-enqueue failure in its catch block. We now restore the id onto the
     * main queue and remove it from the in-flight list, mirroring the restore
     * transaction in inflightIteration().
     */
    private void processRejected(TimedWrap<T> element, RejectedExecutionException e) {
        String id = element.getElement().getIdAsString();
        try (Jedis jedis = jedisPool.getResource()) {
            Transaction multi = jedis.multi();
            multi.lrem(inFlightQueueName, 1, id);
            multi.lpush(queueName, id);
            multi.exec();
            LOG.error("Rejected execution, re-enqueued {}", id, e);
        } catch (Exception pushE) {
            LOG.error("Could not re-enqueue {}", id, pushE);
        }
    }

    /** Runs the consumer on the element, tracking the number of active workers. */
    private void execute(TimedWrap<T> element) {
        threadPool.execute(() -> {
            runningThreads.incrementAndGet();
            try {
                subscription.process(element.getElement());
            } finally {
                runningThreads.decrementAndGet();
            }
        });
    }

    @Override
    public void setState(String id, State state, String info) {
        long timestampMs = System.currentTimeMillis();
        try (Jedis jedis = jedisPool.getResource()) {
            setState(jedis, timestampMs, id, state, info);
        }
    }

    /** Writes the state key with TTL and publishes the change on the state channel. */
    public void setState(Jedis jedis, long timestampMs, String id, State state, String info) {
        String stateSerialized;
        StateInfo stateInfo = new StateInfo(state, timestampMs, info);
        try {
            stateSerialized = stateMapper.serialize(stateInfo);
        } catch (SerializationException e) {
            throw new RedisqException("Could not serialize state " + stateInfo);
        }
        jedis.setex(names.stateKeyFromId(id), ttlStateInfo, stateSerialized);
        jedis.publish(names.stateChannelKeyFromId(id), stateSerialized);
    }

    @Override
    public Optional<StateInfo> getState(String id) {
        String key = names.stateKeyFromId(id);
        return getStateInfoFromRedisKey(key);
    }

    @Override
    public Stream<Optional<ExtendedStateInfo>> getStates() {
        Stream<String> keys;
        try (Jedis jedis = jedisPool.getResource()) {
            keys = jedis.keys(names.stateKeyFromId("*")).stream();
        }
        // NOTE(review): the ExtendedStateInfo id here is the full Redis state key,
        // not the bare document id — confirm callers expect that.
        return keys.map(key -> {
            Optional<StateInfo> stateInfoFromRedisKey = getStateInfoFromRedisKey(key);
            return stateInfoFromRedisKey.map(stateInfo -> new ExtendedStateInfo(key, stateInfo));
        });
    }

    /**
     * Stops the loops, waits for in-flight worker threads to drain, then shuts
     * down the processing pool.
     */
    @Override
    public void close() throws InterruptedException {
        LOG.debug("Closing {}", name);
        synchronized (this) {
            working.set(false);
            if (mainLoop != null) {
                try {
                    mainLoop.get();
                    inFlightLoop.get();
                    try {
                        CLOSE_RETRIER.call(this.runningThreads::get);
                    } catch (RetryException e) {
                        LOG.warn("Closing while some threads are still running");
                    }
                } catch (ExecutionException e) {
                    LOG.error("Error during close", e);
                }
            }
        }
        LOG.debug("Shutting down queue {}", name);
        threadPool.shutdown();
        threadPool.awaitTermination(1, TimeUnit.MINUTES);
        LOG.info("Closed {}", name);
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public QueueConsumer<T> getConsumer() {
        return subscription;
    }

    @Override
    public void pushAndWait(T dummyObject, long waitTimeout, TimeUnit waitTimeoutUnit)
            throws WaitException {
        // Subscribe before pushing so the terminal-state notification cannot be missed.
        Future<Void> f = getFutureForDocumentStateWait(ImmutableSet.of(DONE, FAILED),
                dummyObject.getIdAsString());
        push(dummyObject);
        try {
            f.get(waitTimeout, waitTimeoutUnit);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            throw new WaitException(
                    "Could not wait for " + dummyObject.getIdAsString() + " to be done", e);
        }
    }

    public Names getNames() {
        return names;
    }

    /** Fetches and deserializes the StateInfo at the given Redis key, if present. */
    private Optional<StateInfo> getStateInfoFromRedisKey(String key) {
        try {
            String element;
            try (Jedis jedis = jedisPool.getResource()) {
                element = jedis.get(key);
            }
            if (element == null) {
                return Optional.empty();
            } else {
                return Optional.of(stateMapper.deserialize(element));
            }
        } catch (DeserializationException e) {
            throw new RedisqException("Could not deserialize state info for " + key, e);
        }
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/RedisqBuilder.java
|
package ai.grakn.redisq;
import ai.grakn.redisq.util.Names;
import com.codahale.metrics.MetricRegistry;
import redis.clients.jedis.Jedis;
import redis.clients.util.Pool;
import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import static java.time.temporal.ChronoUnit.*;
/**
 * Fluent builder for {@link Redisq} instances. Every setting has a sensible
 * default except the consumer, the jedis pool, and the document class, which the
 * caller must provide before invoking {@link #createRedisq()}.
 */
public class RedisqBuilder<T extends Document> {
    private String name = "redisq_" + Names.getRandomString();
    private Duration timeout = Duration.of(5, SECONDS);
    private Duration ttlStateInfo = Duration.of(1, DAYS);
    private Duration lockTime = Duration.of(5, MINUTES);
    private Duration discardTime = Duration.of(1, HOURS);
    private ExecutorService threadPool = Executors.newFixedThreadPool(4);
    private Consumer<T> consumer;
    private Pool<Jedis> jedisPool;
    private Class<T> documentClass;
    private MetricRegistry metricRegistry = new MetricRegistry();

    public RedisqBuilder<T> setName(String name) {
        this.name = name;
        return this;
    }

    public RedisqBuilder<T> setTimeout(Duration timeout) {
        this.timeout = timeout;
        return this;
    }

    public RedisqBuilder<T> setTtlStateInfo(Duration ttlStateInfo) {
        this.ttlStateInfo = ttlStateInfo;
        return this;
    }

    public RedisqBuilder<T> setLockTime(Duration lockTime) {
        this.lockTime = lockTime;
        return this;
    }

    public RedisqBuilder<T> setDiscardTime(Duration discardTime) {
        this.discardTime = discardTime;
        return this;
    }

    public RedisqBuilder<T> setConsumer(Consumer<T> consumer) {
        this.consumer = consumer;
        return this;
    }

    public RedisqBuilder<T> setJedisPool(Pool<Jedis> jedisPool) {
        this.jedisPool = jedisPool;
        return this;
    }

    public RedisqBuilder<T> setThreadPool(ExecutorService threadPool) {
        this.threadPool = threadPool;
        return this;
    }

    public RedisqBuilder<T> setDocumentClass(Class<T> documentClass) {
        this.documentClass = documentClass;
        return this;
    }

    public RedisqBuilder<T> setMetricRegistry(MetricRegistry metricRegistry) {
        this.metricRegistry = metricRegistry;
        return this;
    }

    /** Assembles the queue from the configured (or default) settings. */
    public Redisq<T> createRedisq() {
        return new Redisq<>(
                name, timeout, ttlStateInfo, lockTime, discardTime,
                consumer, documentClass, jedisPool, threadPool, metricRegistry);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/State.java
|
package ai.grakn.redisq;
/**
 * Lifecycle states of a queued document. NEW and PROCESSING are transient;
 * DONE and FAILED are terminal.
 */
public enum State {
    NEW, PROCESSING, FAILED, DONE;

    /**
     * @return true when this state is terminal (DONE or FAILED)
     */
    public boolean isFinal() {
        // Enum constants are singletons, so identity comparison is the idiomatic
        // (and null-safe) form of equals() here.
        return this == DONE || this == FAILED;
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/StateFuture.java
|
package ai.grakn.redisq;
import ai.grakn.redisq.exceptions.DeserializationException;
import ai.grakn.redisq.exceptions.StateFutureInitializationException;
import ai.grakn.redisq.exceptions.SubscriptionInterruptedException;
import ai.grakn.redisq.util.Names;
import com.codahale.metrics.MetricRegistry;
import static com.codahale.metrics.MetricRegistry.name;
import com.codahale.metrics.Timer;
import com.codahale.metrics.Timer.Context;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPubSub;
import redis.clients.jedis.exceptions.JedisConnectionException;
import redis.clients.util.Pool;
import java.util.concurrent.*;
public class StateFuture implements Future<Void> {
private static final Logger LOG = LoggerFactory.getLogger(StateFuture.class);
private final JedisPubSub sub;
private final CompletableFuture<Void> subscription;
private final Names names;
private final Timer subscribeWait;
private Set<State> targetState;
private String id;
private final Pool<Jedis> jedisPool;
private final CountDownLatch latch = new CountDownLatch(1);
    /**
     * Builds a future that completes when the document with the given id reaches
     * one of the target states. It subscribes to the document's Redis pub/sub
     * state channel and uses a one-count latch to signal completion; the latch is
     * also released on subscribe/unsubscribe events so initialization cannot hang.
     *
     * @param targetState states whose arrival completes the future
     * @param id id of the watched document
     * @param jedisPool pool used for the blocking subscription
     * @param subscriptionWaitTimeout how long to wait for the subscription to be established
     * @param subscriptionWaitUnit unit of the subscription timeout
     * @param metricRegistry registry for the subscribe/init timers
     * @throws StateFutureInitializationException if the subscription setup is interrupted
     */
    StateFuture(Set<State> targetState, String id, Pool<Jedis> jedisPool,
            long subscriptionWaitTimeout, TimeUnit subscriptionWaitUnit,
            MetricRegistry metricRegistry) throws StateFutureInitializationException {
        this.targetState = targetState;
        this.id = id;
        this.jedisPool = jedisPool;
        this.names = new Names();
        this.sub = new JedisPubSub() {
            @Override
            public void onSubscribe(String channel, int subscribedChannels) {
                // Releases the latch so subscribe() stops waiting for setup.
                latch.countDown();
            }
            @Override
            public void onUnsubscribe(String channel, int subscribedChannels) {
                latch.countDown();
            }
            @Override
            public void onMessage(String channel, String message) {
                try {
                    StateInfo s = Redisq.stateMapper.deserialize(message);
                    // Complete and tear down the subscription once a target state arrives.
                    if (targetState.contains(s.getState())) {
                        latch.countDown();
                        LOG.debug("Received expected state, completing {}", channel);
                        unsubscribe(channel);
                    }
                } catch (DeserializationException e) {
                    LOG.error("Could not deserialise state {}", id, e);
                }
            }
        };
        subscribeWait = metricRegistry.timer(name(StateFuture.class, "subscribe_wait"));
        Timer initWaitTimer = metricRegistry.timer(name(StateFuture.class, "init_wait"));
        try (Context ignored = initWaitTimer.time()) {
            this.subscription = subscribe(subscriptionWaitTimeout, subscriptionWaitUnit);
        } catch (InterruptedException e) {
            throw new StateFutureInitializationException("Could not initialise StateFuture for id " + id, e);
        }
    }
private CompletableFuture<Void> subscribe(long timeout, TimeUnit unit) throws InterruptedException {
CompletableFuture<Void> f = CompletableFuture.runAsync(() -> {
try{
try (Jedis jedis = jedisPool.getResource()) {
String state = jedis.get(names.stateKeyFromId(id));
if (state != null) {
try {
if (targetState.contains(Redisq.stateMapper.deserialize(state).getState())) {
LOG.debug("Unsubscribed because status was already as expected {}", id);
return;
}
} catch (DeserializationException e) {
LOG.error("Could not deserialize state for {}", id, e);
}
}
String stateChannel = new Names().stateChannelKeyFromId(id);
LOG.debug("Waiting for changes to {}", stateChannel);
try (Context ignored = subscribeWait.time()) {
jedis.subscribe(sub, stateChannel);
}
} finally{
latch.countDown();
}
} catch (JedisConnectionException e) {
if (jedisPool.isClosed()) {
throw new SubscriptionInterruptedException("Subscription interrupted because the Jedis connection was closed for id " + id, e);
} else {
LOG.error("Could not connect to Redis while subscribing to {}", id, e);
throw e;
}
}
});
if (!f.isCompletedExceptionally() && !f.isCancelled()) {
latch.await(timeout, unit);
LOG.debug("Subscribed successfully to {}", id);
} else {
LOG.error("QueueConsumer ended before expected");
}
return f;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
StateFuture that = (StateFuture) o;
return targetState == that.targetState && (id != null ? id.equals(that.id) : that.id == null);
}
@Override
public int hashCode() {
int result = targetState != null ? targetState.hashCode() : 0;
result = 31 * result + (id != null ? id.hashCode() : 0);
return result;
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
sub.unsubscribe(names.stateChannelKeyFromId(id));
return subscription.cancel(mayInterruptIfRunning);
}
@Override
public boolean isCancelled() {
return subscription.isCancelled();
}
@Override
public boolean isDone() {
return subscription.isDone();
}
@Override
public Void get() throws InterruptedException, ExecutionException {
return subscription.get();
}
@Override
public Void get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
return subscription.get(timeout, unit);
}
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/StateInfo.java
|
package ai.grakn.redisq;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Snapshot of a document's processing state: the {@link State} itself, the timestamp (ms) of
 * the last processing step, and an optional free-form info string (e.g. a failure message).
 * Serialised/deserialised by Jackson.
 */
public class StateInfo {

    @JsonProperty
    private State state;
    @JsonProperty
    private long lastProcessed;
    @JsonProperty
    private String info = null;

    // Required by Jackson
    public StateInfo() {}

    public StateInfo(State state, long lastProcessed, String info) {
        this.state = state;
        this.lastProcessed = lastProcessed;
        this.info = info;
    }

    public State getState() {
        return state;
    }

    public long getLastProcessed() {
        return lastProcessed;
    }

    public String getInfo() {
        return info;
    }

    @Override
    public String toString() {
        // Produces exactly: StateInfo{state=<state>, lastProcessed=<ms>, info='<info>'}
        StringBuilder text = new StringBuilder("StateInfo{");
        text.append("state=").append(state);
        text.append(", lastProcessed=").append(lastProcessed);
        text.append(", info='").append(info).append('\'');
        text.append('}');
        return text.toString();
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/consumer/Mapper.java
|
package ai.grakn.redisq.consumer;
import ai.grakn.redisq.exceptions.DeserializationException;
import ai.grakn.redisq.exceptions.SerializationException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
/**
 * Utility class that centralises the Jackson serialise/deserialise logic.
 *
 * @param <T> type of the object mapped to and from JSON
 */
public class Mapper<T> {

    private static final ObjectMapper objectMapper = new ObjectMapper();

    // Resolved Jackson target type; fixed at construction.
    private final JavaType tClass;

    public Mapper(JavaType tClass) {
        this.tClass = tClass;
    }

    public Mapper(Class<T> tClass) {
        this.tClass = objectMapper.getTypeFactory().constructType(tClass);
    }

    /**
     * Serialises {@code element} to its JSON representation.
     *
     * @throws SerializationException if the element's class is not serialisable by Jackson or
     *         serialisation itself fails
     */
    public String serialize(T element) throws SerializationException {
        Class<?> elementClass = element.getClass();
        // Guard clause: reject classes Jackson cannot handle before attempting to write.
        if (!objectMapper.canSerialize(elementClass)) {
            throw new SerializationException("Could not serialize class " + elementClass.getName(), element);
        }
        try {
            return objectMapper.writeValueAsString(element);
        } catch (JsonProcessingException e) {
            throw new SerializationException("Error while trying to serialize element", element, e);
        }
    }

    /**
     * Deserialises a JSON string into an instance of the configured type.
     *
     * @throws DeserializationException if the string cannot be read as the target type
     */
    public T deserialize(String element) throws DeserializationException {
        try {
            return objectMapper.readValue(element, tClass);
        } catch (IOException e) {
            throw new DeserializationException("Could not deserialize string", element, e);
        }
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/consumer/QueueConsumer.java
|
package ai.grakn.redisq.consumer;
/**
 * Interface for queue consumers.
 *
 * @param <T> Class of the object that is consumed.
 */
public interface QueueConsumer<T> {
    /**
     * This method implements the logic for consuming documents from the queue.
     * The result is not returned but some effect should be produced.
     *
     * <p>NOTE(review): implementations are expected to handle their own failures —
     * the interface declares no checked exceptions. Confirm against callers.
     *
     * @param document Document read from the queue
     */
    void process(T document);
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/consumer/RedisqConsumer.java
|
package ai.grakn.redisq.consumer;
import ai.grakn.redisq.Document;
import ai.grakn.redisq.Redisq;
import ai.grakn.redisq.State;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;
import redis.clients.util.Pool;
import java.util.function.Consumer;
import static ai.grakn.redisq.State.DONE;
import static ai.grakn.redisq.State.FAILED;
/**
 * {@link QueueConsumer} implementation that delegates each document to a user-supplied
 * {@link Consumer} and records the outcome in Redis: on success the document's state is set
 * to {@code DONE}, and on any exception to {@code FAILED} with the exception message as info.
 * In both cases the document's lock key is deleted.
 *
 * @param <T> type of the documents taken from the queue
 */
public class RedisqConsumer<T extends Document> implements QueueConsumer<T> {
    private static final Logger LOG = LoggerFactory.getLogger(RedisqConsumer.class);

    // All collaborators are fixed at construction.
    private final Consumer<T> consumer;
    private final Pool<Jedis> jedisPool;
    private final Redisq<T> tRedisq;

    /**
     * @param consumer  user logic applied to each dequeued document
     * @param jedisPool pool providing Redis connections for state updates
     * @param tRedisq   owning queue, used to write state and derive key names
     */
    public RedisqConsumer(Consumer<T> consumer, Pool<Jedis> jedisPool, Redisq<T> tRedisq) {
        this.consumer = consumer;
        this.jedisPool = jedisPool;
        this.tRedisq = tRedisq;
    }

    @Override
    public void process(T element) {
        try {
            consumer
                    .andThen(e -> updateState(e, DONE, ""))
                    .accept(element);
        } catch (Exception e) {
            // Log the full exception before recording FAILED: previously the stack trace was
            // discarded and only e.getMessage() (possibly null) survived in the state info.
            LOG.error("Processing failed for {}", element.getIdAsString(), e);
            updateState(element, FAILED, e.getMessage());
        }
    }

    /** Writes the new state for {@code element} and releases its lock key. */
    private void updateState(T element, State state, String info) {
        try (Jedis jedis = jedisPool.getResource()) {
            String id = element.getIdAsString();
            tRedisq.setState(jedis, System.currentTimeMillis(), id, state, info);
            jedis.del(tRedisq.getNames().lockKeyFromId(id));
        } catch (JedisConnectionException jedisException) {
            // Include the exception itself so the stack trace is not lost.
            LOG.error("Pool is full or terminated. Active: {}, idle: {}",
                    jedisPool.getNumActive(), jedisPool.getNumIdle(), jedisException);
            throw jedisException;
        } catch (Exception e) {
            LOG.error("Unexpected exception while updating state for {}", element, e);
            throw e;
        }
        LOG.debug("Status {} set as {}", element.getIdAsString(), state);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/consumer/TimedWrap.java
|
package ai.grakn.redisq.consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Jackson-serialisable wrapper pairing an element with a millisecond timestamp
 * (presumably the time it was enqueued — confirm against the caller that creates it).
 *
 * @param <T> type of the wrapped element
 */
public class TimedWrap<T> {
    @JsonProperty
    private T element;
    @JsonProperty
    private long timestampMs;
    // Required by Jackson
    public TimedWrap() {}
    public TimedWrap(T element, long timestampMs) {
        this.element = element;
        this.timestampMs = timestampMs;
    }
    public T getElement() {
        return element;
    }
    public long getTimestampMs() {
        return timestampMs;
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/DeserializationException.java
|
package ai.grakn.redisq.exceptions;
import java.io.IOException;
/**
 * Signals that a raw string could not be deserialised into its target type.
 * The offending string is retained for diagnostics and exposed via {@link #getElement()}.
 */
public class DeserializationException extends IOException {

    // The raw string that failed to deserialise.
    private final String element;

    public DeserializationException(String message, String element, Throwable e) {
        super(message, e);
        this.element = element;
    }

    /** @return the raw string that could not be deserialised */
    public String getElement() {
        return element;
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/RedisqException.java
|
package ai.grakn.redisq.exceptions;
/**
 * Generic unchecked exception for failures inside redisq.
 */
public class RedisqException extends RuntimeException {

    public RedisqException(String s) {
        super(s);
    }

    public RedisqException(String s, Exception e) {
        super(s, e);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/SerializationException.java
|
package ai.grakn.redisq.exceptions;
import java.io.IOException;
/**
 * Signals that an object could not be serialised. The offending object is retained for
 * diagnostics and exposed via {@link #getElement()}.
 */
public class SerializationException extends IOException {

    // The object that failed to serialise.
    private final Object element;

    // The previous constructors declared an unused generic type parameter <T>;
    // it had no effect and has been removed (callers are unaffected).
    public SerializationException(String message, Object element) {
        super(message);
        this.element = element;
    }

    public SerializationException(String message, Object element, Throwable throwable) {
        super(message, throwable);
        this.element = element;
    }

    /** @return the object that could not be serialised */
    public Object getElement() {
        return element;
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/StateFutureInitializationException.java
|
package ai.grakn.redisq.exceptions;
/**
 * Thrown when a StateFuture cannot be initialised because the thread was interrupted
 * while establishing its Redis subscription.
 */
public class StateFutureInitializationException extends WaitException {
    public StateFutureInitializationException(String message, InterruptedException e) {
        super(message, e);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/SubscriptionInterruptedException.java
|
package ai.grakn.redisq.exceptions;
/**
 * Unchecked exception signalling that an active pub/sub subscription was interrupted,
 * e.g. because the underlying connection pool was closed.
 */
public class SubscriptionInterruptedException extends RuntimeException {

    public SubscriptionInterruptedException(String message, Exception e) {
        super(message, e);
    }
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/exceptions/WaitException.java
|
package ai.grakn.redisq.exceptions;
public class WaitException extends Exception {
public WaitException(String message, Exception e) {
super(message, e);
}
}
|
0
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq
|
java-sources/ai/grakn/redisq/0.0.5/ai/grakn/redisq/util/Names.java
|
package ai.grakn.redisq.util;
import java.math.BigInteger;
import java.util.Random;
/**
 * Builds the Redis key and channel names used by redisq, and generates random id strings.
 * Every key is prefixed with {@code redisq:}; spaces in ids are replaced with underscores.
 */
public class Names {

    private static final Random random = new Random();
    private static final int NUM_BITS = 130;
    private static final int RADIX = 32;

    private static final String PREFIX = "redisq:";
    private static final String QUEUE = "queue:";
    private static final String INFLIGHT = "inflight:";
    private static final String CONTENT = "content:";
    private static final String STATE = "state:";
    private static final String STATE_CHANNEL = "state:channel:";
    private static final String LOCK = "lock:";

    /** Key of the main queue list for the given queue name. */
    public String queueNameFor(String name) {
        return PREFIX + QUEUE + name;
    }

    /** Key of the in-flight queue for the given queue name. */
    public String inFlightQueueNameFor(String name) {
        return PREFIX + INFLIGHT + name;
    }

    /** Key holding the serialised state of the document with the given id. */
    public String stateKeyFromId(String idAsString) {
        return PREFIX + STATE + encoded(idAsString);
    }

    /** Pub/sub channel on which state changes for the given id are published. */
    public String stateChannelKeyFromId(String idAsString) {
        return PREFIX + STATE_CHANNEL + encoded(idAsString);
    }

    /** Key holding the serialised content of the document with the given id. */
    public String contentKeyFromId(String idAsString) {
        return PREFIX + CONTENT + encoded(idAsString);
    }

    /** Lock key guarding processing of the document with the given id. */
    public String lockKeyFromId(String idAsString) {
        return PREFIX + LOCK + encoded(idAsString);
    }

    /** @return a random 130-bit id rendered in base 32 */
    public static String getRandomString() {
        return new BigInteger(NUM_BITS, random).toString(RADIX);
    }

    private String encoded(String idAsString) {
        // String.replace performs a literal replacement; replaceAll compiled the argument
        // as a regex on every call for an identical result.
        return idAsString.replace(" ", "_");
    }
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/GraknDb.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import com.ldbc.driver.Db;
import com.ldbc.driver.DbConnectionState;
import com.ldbc.driver.DbException;
import com.ldbc.driver.control.LoggingService;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery1PersonProfile;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery4MessageContent;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery5MessageCreator;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery6MessageForum;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery7MessageReplies;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate1AddPerson;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate2AddPostLike;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate3AddCommentLike;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate4AddForum;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate5AddForumMembership;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate6AddPost;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate7AddComment;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate8AddFriendship;
import java.io.IOException;
import java.util.Map;
/**
 * Register the queries that are implemented in Grakn with the ldbc driver.
 *
 * <p>Lifecycle: {@link #onInit} creates the shared connection state and registers all
 * operation handlers; {@link #onClose} closes the underlying session.
 *
 * @author sheldon, felix
 */
@SuppressWarnings("unused")//Used as part of SNB load test
public class GraknDb extends Db {
    // Created in onInit, closed in onClose; proxies the Grakn session.
    private GraknDbConnectionState connectionState = null;
    @Override
    protected void onInit(Map<String, String> properties, LoggingService loggingService) throws DbException {
        connectionState = new GraknDbConnectionState(properties);
        registerOperationHandler(LdbcShortQuery1PersonProfile.class, GraknShortQueryHandlers.LdbcShortQuery1PersonProfileHandler.class);
        // TODO: This query is very slow because it does ordering within Graql
        //        registerOperationHandler(LdbcShortQuery2PersonPosts.class, GraknShortQueryHandlers.LdbcShortQuery2PersonPostsHandler.class);
        // TODO: This query seems to hold up validation starting for unclear reasons
        //        registerOperationHandler(LdbcShortQuery3PersonFriends.class, GraknShortQueryHandlers.LdbcShortQuery3PersonFriendsHandler.class);
        registerOperationHandler(LdbcShortQuery4MessageContent.class, GraknShortQueryHandlers.LdbcShortQuery4MessageContentHandler.class);
        registerOperationHandler(LdbcShortQuery5MessageCreator.class, GraknShortQueryHandlers.LdbcShortQuery5MessageCreatorHandler.class);
        registerOperationHandler(LdbcShortQuery6MessageForum.class, GraknShortQueryHandlers.LdbcShortQuery6MessageForumHandler.class);
        registerOperationHandler(LdbcShortQuery7MessageReplies.class, GraknShortQueryHandlers.LdbcShortQuery7MessageRepliesHandler.class);
        // IMPORTANT: the other queries are dependent on these update queries having executed
        registerOperationHandler(LdbcUpdate1AddPerson.class, GraknUpdateQueryHandlers.LdbcUpdate1AddPersonHandler.class);
        registerOperationHandler(LdbcUpdate2AddPostLike.class, GraknUpdateQueryHandlers.LdbcUpdate2AddPostLikeHandler.class);
        registerOperationHandler(LdbcUpdate3AddCommentLike.class, GraknUpdateQueryHandlers.LdbcUpdate3AddCommentLikeHandler.class);
        registerOperationHandler(LdbcUpdate4AddForum.class, GraknUpdateQueryHandlers.LdbcUpdate4AddForumHandler.class);
        registerOperationHandler(LdbcUpdate5AddForumMembership.class, GraknUpdateQueryHandlers.LdbcUpdate5AddForumMembershipHandler.class);
        registerOperationHandler(LdbcUpdate6AddPost.class, GraknUpdateQueryHandlers.LdbcUpdate6AddPostHandler.class);
        registerOperationHandler(LdbcUpdate7AddComment.class, GraknUpdateQueryHandlers.LdbcUpdate7AddCommentHandler.class);
        registerOperationHandler(LdbcUpdate8AddFriendship.class, GraknUpdateQueryHandlers.LdbcUpdate8AddFriendshipHandler.class);
        // TODO: disabling because they are slow
        //        registerOperationHandler(LdbcQuery1.class, GraknQueryHandlers.LdbcQuery1Handler.class);
        //        registerOperationHandler(LdbcQuery2.class, GraknQueryHandlers.LdbcQuery2Handler.class);
        //        registerOperationHandler(LdbcQuery8.class, GraknQueryHandlers.LdbcQuery8Handler.class);
        //        registerOperationHandler(LdbcQuery13.class, GraknQueryHandlers.LdbcQuery13Handler.class);
    }
    @Override
    protected void onClose() throws IOException {
        connectionState.close();
    }
    @Override
    protected DbConnectionState getConnectionState() throws DbException {
        return connectionState;
    }
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/GraknDbConnectionState.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import ai.grakn.engine.GraknConfig;
import ai.grakn.factory.EmbeddedGraknSession;
import ai.grakn.factory.GraknTxFactoryBuilder;
import com.ldbc.driver.DbConnectionState;
import java.util.Map;
/**
 * Implementation of the database connection for grakn. Essentially a proxy for a session.
 *
 * @author sheldon, felix
 */
public class GraknDbConnectionState extends DbConnectionState {

    private final GraknSession session;

    /**
     * Initiate the grakn session.
     *
     * @param properties the properties from the ldbc properties file; the keyspace is read
     *                   from the {@code ai.grakn.keyspace} entry
     */
    public GraknDbConnectionState(Map<String, String> properties) {
        String keyspace = properties.get("ai.grakn.keyspace");
        session = EmbeddedGraknSession.createEngineSession(Keyspace.of(keyspace), GraknConfig.create(), GraknTxFactoryBuilder.getInstance());
    }

    @Override
    public void close() {
        session.close();
    }

    /**
     * Get the open grakn session.
     *
     * @return the open session
     */
    GraknSession session() {
        return session;
    }
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/GraknQueryHandlers.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import ai.grakn.concept.Attribute;
import ai.grakn.concept.Concept;
import ai.grakn.concept.ConceptId;
import ai.grakn.concept.Entity;
import ai.grakn.graql.Match;
import ai.grakn.graql.Order;
import ai.grakn.graql.Var;
import ai.grakn.graql.answer.ConceptList;
import ai.grakn.graql.answer.ConceptMap;
import com.ldbc.driver.DbException;
import com.ldbc.driver.OperationHandler;
import com.ldbc.driver.ResultReporter;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery1;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery13;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery13Result;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery1Result;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery2;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery2Result;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery8;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery8Result;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static ai.grakn.SNB.$content;
import static ai.grakn.SNB.$date;
import static ai.grakn.SNB.$firstName;
import static ai.grakn.SNB.$friend;
import static ai.grakn.SNB.$friendId;
import static ai.grakn.SNB.$lastName;
import static ai.grakn.SNB.$message;
import static ai.grakn.SNB.$messageId;
import static ai.grakn.SNB.$person;
import static ai.grakn.SNB.BIRTHDAY;
import static ai.grakn.SNB.BROWSER_USED;
import static ai.grakn.SNB.CLASS_YEAR;
import static ai.grakn.SNB.CONTENT;
import static ai.grakn.SNB.CREATION_DATE;
import static ai.grakn.SNB.EMAIL;
import static ai.grakn.SNB.FIRST_NAME;
import static ai.grakn.SNB.GENDER;
import static ai.grakn.SNB.HAS_CREATOR;
import static ai.grakn.SNB.IMAGE_FILE;
import static ai.grakn.SNB.IS_LOCATED_IN;
import static ai.grakn.SNB.KNOWS;
import static ai.grakn.SNB.LAST_NAME;
import static ai.grakn.SNB.LOCATION_IP;
import static ai.grakn.SNB.MESSAGE_ID;
import static ai.grakn.SNB.NAME;
import static ai.grakn.SNB.PERSON_ID;
import static ai.grakn.SNB.REPLY;
import static ai.grakn.SNB.REPLY_OF;
import static ai.grakn.SNB.SPEAKS;
import static ai.grakn.SNB.STUDY_AT;
import static ai.grakn.SNB.WORK_AT;
import static ai.grakn.SNB.WORK_FROM;
import static ai.grakn.SNB.by;
import static ai.grakn.SNB.resource;
import static ai.grakn.SNB.toEpoch;
import static ai.grakn.graql.Graql.compute;
import static ai.grakn.graql.Graql.lte;
import static ai.grakn.graql.Graql.match;
import static ai.grakn.graql.Graql.or;
import static ai.grakn.graql.Graql.var;
import static ai.grakn.util.GraqlSyntax.Compute.Method.PATH;
/**
* Implementations of the LDBC SNB complex queries.
*
* @author sheldon
*/
public class GraknQueryHandlers {
private GraknQueryHandlers() {
}
/**
* Complex Query 2
*/
public static class LdbcQuery2Handler implements OperationHandler<LdbcQuery2, GraknDbConnectionState> {
@Override
public void executeOperation(LdbcQuery2 ldbcQuery2, GraknDbConnectionState dbConnectionState, ResultReporter resultReporter) throws DbException {
GraknSession session = dbConnectionState.session();
try (GraknTx graknTx = session.transaction(GraknTxType.READ)) {
LocalDateTime maxDate = SNB.fromDate(ldbcQuery2.maxDate());
// to make this query execute faster split it into two parts:
// the first does the ordering
// the second fetches the resources
Match graknLdbcQuery2 = match(
var().rel($person.has(PERSON_ID, ldbcQuery2.personId())).rel($friend).isa(KNOWS),
var().rel($friend).rel($message).isa(HAS_CREATOR),
$message.has(CREATION_DATE, $date).has(MESSAGE_ID, $messageId),
$date.val(lte(maxDate)));
List<ConceptMap> rawResult = graknLdbcQuery2.orderBy($date, Order.desc)
.limit(ldbcQuery2.limit()).withTx(graknTx).get().execute();
// process the query results
List<LdbcQuery2Result> result = rawResult.stream()
// sort first by date and then by message id
.sorted(Comparator.comparing(by($date)).reversed().thenComparing(by($messageId)))
.map(map -> {
// fetch the resources attached to entities in the queries
Match queryExtendedInfo = match(
$friend.has(FIRST_NAME, $firstName).has(LAST_NAME, $lastName).has(PERSON_ID, $friendId),
var().rel($friend).rel($message).isa(HAS_CREATOR),
$message.has(CREATION_DATE, $date)
.has(MESSAGE_ID, SNB.<Long>resource(map, $messageId)),
or($message.has(CONTENT, $content), $message.has(IMAGE_FILE, $content)));
ConceptMap extendedInfo = queryExtendedInfo.withTx(graknTx).get().execute().iterator().next();
// prepare the answer from the original query and the query for extended information
return new LdbcQuery2Result(
resource(extendedInfo, $friendId),
resource(extendedInfo, $firstName),
resource(extendedInfo, $lastName),
resource(map, $messageId),
resource(extendedInfo, $content),
toEpoch(resource(map, $date)));
}).collect(Collectors.toList());
resultReporter.report(0, result, ldbcQuery2);
}
}
}
/**
* Complex Query 8
*/
public static class LdbcQuery8Handler implements OperationHandler<LdbcQuery8, GraknDbConnectionState> {
@Override
public void executeOperation(LdbcQuery8 ldbcQuery8, GraknDbConnectionState dbConnectionState, ResultReporter resultReporter) throws DbException {
GraknSession session = dbConnectionState.session();
try (GraknTx graknTx = session.transaction(GraknTxType.READ)) {
// for speed the query is again split into the ordering and limit phase
Var reply = var("aReply");
Var responder = var("responder");
Var responderId = var("responderId");
Match orderQuery = match(
$person.has(PERSON_ID, ldbcQuery8.personId()),
var().rel($person).rel($message).isa(HAS_CREATOR),
var().rel($message).rel(REPLY, reply).isa(REPLY_OF),
reply.has(CREATION_DATE, $date).has(MESSAGE_ID, $messageId)
);
List<ConceptMap> rawResult = orderQuery.withTx(graknTx)
.orderBy($date, Order.desc).limit(ldbcQuery8.limit()).get().execute();
// sort first by date and then by message id
// process the query results
List<LdbcQuery8Result> result = rawResult.stream()
.sorted(Comparator.comparing(by($date)).reversed().thenComparing(by($messageId)))
.map(map -> {
// fetch the resources attached to entities in the queries
Match queryExtendedInfo = match(
reply.has(MESSAGE_ID, SNB.<Long>resource(map, $messageId)),
or(reply.has(CONTENT, $content), reply.has(IMAGE_FILE, $content)),
var().rel(reply).rel(responder).isa(HAS_CREATOR),
responder.has(PERSON_ID, responderId).has(FIRST_NAME, $firstName).has(LAST_NAME, $lastName)
);
ConceptMap extendedInfo = queryExtendedInfo.withTx(graknTx).get().execute().iterator().next();
// prepare the answer from the original query and the query for extended information
return new LdbcQuery8Result(
resource(extendedInfo, responderId),
resource(extendedInfo, $firstName),
resource(extendedInfo, $lastName),
toEpoch(resource(map, $date)),
resource(map, $messageId),
resource(extendedInfo, $content));
}).collect(Collectors.toList());
resultReporter.report(0, result, ldbcQuery8);
}
}
}
/**
* Complex Query 1
*/
public static class LdbcQuery1Handler implements OperationHandler<LdbcQuery1, GraknDbConnectionState> {
@Override
public void executeOperation(LdbcQuery1 ldbcQuery1, GraknDbConnectionState dbConnectionState, ResultReporter resultReporter) throws DbException {
GraknSession session = dbConnectionState.session();
try (GraknTx graknTx = session.transaction(GraknTxType.READ)) {
Var anyone = var("anyone");
Var anyoneElse = var("anyoneElse");
// for speed fetch the Grakn id first
ConceptId graknPersonId = match($person.has(PERSON_ID, ldbcQuery1.personId())).withTx(graknTx).
get().execute().iterator().next().get($person).id();
// sort by lastname and then id
Comparator<ConceptMap> byLastNameAndId = Comparator.comparing(by($lastName)).thenComparing(by($friendId));
// This query has to be split into 3 parts, each fetching people a further distance away
// The longer queries only need be executed if there are not enough shorter queries
// The last ordering by id must be done after each query has been executed
Match match = match($person.id(graknPersonId),
var().rel($person).rel($friend).isa(KNOWS),
$friend.has(FIRST_NAME, ldbcQuery1.firstName()).
has(LAST_NAME, $lastName).
has(PERSON_ID, $friendId),
$person.neq($friend));
List<ConceptMap> distance1Result = match.withTx(graknTx).get().execute();
List<LdbcQuery1Result> distance1LdbcResult = populateResults(distance1Result.stream().sorted(byLastNameAndId), ldbcQuery1, graknTx, 1);
if (distance1Result.size() < ldbcQuery1.limit()) {
match = match($person.id(graknPersonId),
var().rel($person).rel(anyone).isa(KNOWS),
var().rel(anyone).rel($friend).isa(KNOWS),
$friend.has(FIRST_NAME, ldbcQuery1.firstName()).
has(LAST_NAME, $lastName).
has(PERSON_ID, $friendId),
$person.neq($friend)
);
List<ConceptMap> distance2Result = match.withTx(graknTx).get().execute();
distance1LdbcResult.addAll(populateResults(distance2Result.stream().sorted(byLastNameAndId), ldbcQuery1, graknTx, 2));
if (distance1Result.size() + distance2Result.size() < ldbcQuery1.limit()) {
match = match($person.id(graknPersonId),
var().rel($person).rel(anyone).isa(KNOWS),
var().rel(anyone).rel(anyoneElse).isa(KNOWS),
var().rel(anyoneElse).rel($friend).isa(KNOWS),
$friend.has(FIRST_NAME, ldbcQuery1.firstName()).
has(LAST_NAME, $lastName).
has(PERSON_ID, $friendId),
$person.neq($friend),
$friend.neq(anyone)
);
List<ConceptMap> distance3Result = match.withTx(graknTx).get().execute();
distance1LdbcResult.addAll(populateResults(distance3Result.stream().sorted(byLastNameAndId), ldbcQuery1, graknTx, 3));
}
}
resultReporter.report(0, distance1LdbcResult, ldbcQuery1);
}
}
/**
* Populate the LdbcQuery1Result object from graql results. As part of this extra queries are executed to fetch related information.
*
* @param graqlResults the graql results used to populate the ldbc results
* @param ldbcQuery1 the ldbc query parameters
* @param graknTx the graph for additional queries
* @param distance the number of knows relations between initial person and these results
* @return the ldbc results
*/
private static List<LdbcQuery1Result> populateResults(Stream<ConceptMap> graqlResults, LdbcQuery1 ldbcQuery1, GraknTx graknTx, int distance) {
return graqlResults.limit(ldbcQuery1.limit()).map(map -> {
// these queries get all of the additional related material, excluding resources
Var location = var("aLocation");
Match locationQuery = match(
$friend.id(map.get($friend).id()),
var().rel($friend).rel(location).isa(IS_LOCATED_IN));
ConceptMap locationResult = locationQuery.withTx(graknTx).get().execute().iterator().next();
Var year = var("aYear");
Var oganisation = var("aOrganisation");
Match universityQuery = match(
$friend.id(map.get($friend).id()),
var().rel($friend).rel(oganisation).isa(STUDY_AT).has(CLASS_YEAR, year),
var().rel(oganisation).rel(location).isa(IS_LOCATED_IN)
);
List<ConceptMap> universityResults = universityQuery.withTx(graknTx).get().execute();
List<List<Object>> universityProcessedResults = universityResults.stream().map(answer -> {
List<Object> result = new ArrayList<>();
result.add(getSingleResource(answer.get(oganisation).asEntity(), NAME, graknTx));
result.add(resource(answer, year));
result.add(getSingleResource(answer.get(location).asEntity(), NAME, graknTx));
return result;
}).collect(Collectors.toList());
Match workQuery = match(
$friend.id(map.get($friend).id()),
var().rel($friend).rel(oganisation).isa(WORK_AT).has(WORK_FROM, year),
var().rel(oganisation).rel(location).isa(IS_LOCATED_IN)
);
List<ConceptMap> workResults = workQuery.withTx(graknTx).get().execute();
List<List<Object>> workProcessedResults = workResults.stream().map(answer -> {
List<Object> result = new ArrayList<>();
result.add(getSingleResource(answer.get(oganisation).asEntity(), NAME, graknTx));
result.add(resource(answer, year));
result.add(getSingleResource(answer.get(location).asEntity(), NAME, graknTx));
return result;
}).collect(Collectors.toList());
// populate the result with resources using graphAPI and relations from additional info query
return new LdbcQuery1Result(
resource(map, $friendId),
resource(map, $lastName),
distance,
toEpoch(getSingleResource(map.get($friend).asEntity(), BIRTHDAY, graknTx)),
toEpoch(getSingleResource(map.get($friend).asEntity(), CREATION_DATE, graknTx)),
getSingleResource(map.get($friend).asEntity(), GENDER, graknTx),
getSingleResource(map.get($friend).asEntity(), BROWSER_USED, graknTx),
getSingleResource(map.get($friend).asEntity(), LOCATION_IP, graknTx),
getListResources(map.get($friend).asEntity(), EMAIL, graknTx),
getListResources(map.get($friend).asEntity(), SPEAKS, graknTx),
getSingleResource(locationResult.get(location).asEntity(), NAME, graknTx),
universityProcessedResults,
workProcessedResults);
}).collect(Collectors.toList());
}
/**
 * Reads one attribute value of the given attribute type attached to {@code entity}.
 * NOTE(review): takes the first value from the attribute stream and assumes at least
 * one such attribute exists — callers must guarantee this, otherwise this throws
 * NoSuchElementException. The unchecked cast mirrors the dynamically-typed Graql API.
 */
@SuppressWarnings("unchecked")
private static <T> T getSingleResource(Entity entity, String resourceType, GraknTx graknTx) {
    Attribute<?> attribute = entity.attributes(graknTx.getAttributeType(resourceType))
            .iterator()
            .next();
    return (T) attribute.value();
}
/**
 * Collects every attribute value of the given attribute type attached to {@code entity}
 * into a list. Returns an empty list when the entity carries no such attribute.
 */
@SuppressWarnings("unchecked")
private static <T> List<T> getListResources(Entity entity, String resourceType, GraknTx graknTx) {
    return entity.attributes(graknTx.getAttributeType(resourceType))
            .map(attribute -> (T) attribute.value())
            .collect(Collectors.toList());
}
}
/**
 * Complex Query 13 — shortest path between two persons over the "knows" network.
 * Reports the path length, or -1 when no path exists.
 */
public static class LdbcQuery13Handler implements OperationHandler<LdbcQuery13, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcQuery13 ldbcQuery13, GraknDbConnectionState dbConnectionState, ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graknTx = session.transaction(GraknTxType.READ)) {
            // Resolve both person ids to concepts; assumes each id matches exactly one
            // person — TODO confirm callers only pass ids present in the dataset.
            Match match = match($person.has(PERSON_ID, ldbcQuery13.person1Id()));
            Concept person1 = match.withTx(graknTx).get().execute().iterator().next().get($person);
            match = match($person.has(PERSON_ID, ldbcQuery13.person2Id()));
            Concept person2 = match.withTx(graknTx).get().execute().iterator().next().get($person);
            // Shortest-path compute restricted to person vertices and "knows" edges.
            List<ConceptList> paths = compute(PATH).from(person1.id()).to(person2.id())
                    .in("knows", "person").withTx(graknTx).execute();
            List<ConceptId> path = Collections.emptyList();
            if (!paths.isEmpty()) path = paths.get(0).list();
            // our path is either:
            // empty if there is none
            // one if source = destination
            // 2*l+1 where l is the length of the path
            // (the returned path alternates person and relationship concepts,
            //  hence the division by two below)
            int l = path.size() - 1;
            LdbcQuery13Result result;
            if (l < 1) {
                // l == -1 (no path) or l == 0 (source equals destination); report as-is.
                result = new LdbcQuery13Result(l);
            } else {
                result = new LdbcQuery13Result(l / 2);
            }
            resultReporter.report(0, result, ldbcQuery13);
        }
    }
}
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/GraknShortQueryHandlers.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import ai.grakn.concept.ConceptId;
import ai.grakn.graql.Order;
import ai.grakn.graql.Var;
import ai.grakn.graql.answer.ConceptMap;
import com.ldbc.driver.DbException;
import com.ldbc.driver.OperationHandler;
import com.ldbc.driver.ResultReporter;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery1PersonProfile;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery1PersonProfileResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery2PersonPosts;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery2PersonPostsResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery3PersonFriends;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery3PersonFriendsResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery4MessageContent;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery4MessageContentResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery5MessageCreator;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery5MessageCreatorResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery6MessageForum;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery6MessageForumResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery7MessageReplies;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery7MessageRepliesResult;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import static ai.grakn.SNB.$author;
import static ai.grakn.SNB.$author1;
import static ai.grakn.SNB.$author2;
import static ai.grakn.SNB.$authorId;
import static ai.grakn.SNB.$birthday;
import static ai.grakn.SNB.$browserUsed;
import static ai.grakn.SNB.$comment;
import static ai.grakn.SNB.$commentId;
import static ai.grakn.SNB.$content;
import static ai.grakn.SNB.$creationDate;
import static ai.grakn.SNB.$date;
import static ai.grakn.SNB.$firstName;
import static ai.grakn.SNB.$forum;
import static ai.grakn.SNB.$forumId;
import static ai.grakn.SNB.$friend;
import static ai.grakn.SNB.$friendId;
import static ai.grakn.SNB.$gender;
import static ai.grakn.SNB.$lastName;
import static ai.grakn.SNB.$locationIp;
import static ai.grakn.SNB.$message;
import static ai.grakn.SNB.$messageId;
import static ai.grakn.SNB.$mod;
import static ai.grakn.SNB.$modId;
import static ai.grakn.SNB.$opId;
import static ai.grakn.SNB.$originalPost;
import static ai.grakn.SNB.$person;
import static ai.grakn.SNB.$personId;
import static ai.grakn.SNB.$place;
import static ai.grakn.SNB.$placeId;
import static ai.grakn.SNB.$title;
import static ai.grakn.SNB.BIRTHDAY;
import static ai.grakn.SNB.BROWSER_USED;
import static ai.grakn.SNB.CHILD_MESSAGE;
import static ai.grakn.SNB.CONTENT;
import static ai.grakn.SNB.CREATION_DATE;
import static ai.grakn.SNB.CREATOR;
import static ai.grakn.SNB.FIRST_NAME;
import static ai.grakn.SNB.FORUM_ID;
import static ai.grakn.SNB.FORUM_MEMBER;
import static ai.grakn.SNB.FRIEND;
import static ai.grakn.SNB.GENDER;
import static ai.grakn.SNB.GROUP_FORUM;
import static ai.grakn.SNB.HAS_CREATOR;
import static ai.grakn.SNB.HAS_MODERATOR;
import static ai.grakn.SNB.IMAGE_FILE;
import static ai.grakn.SNB.IS_LOCATED_IN;
import static ai.grakn.SNB.KNOWS;
import static ai.grakn.SNB.LAST_NAME;
import static ai.grakn.SNB.LOCATED;
import static ai.grakn.SNB.LOCATION_IP;
import static ai.grakn.SNB.MEMBER_MESSAGE;
import static ai.grakn.SNB.MESSAGE;
import static ai.grakn.SNB.MESSAGE_ID;
import static ai.grakn.SNB.MODERATED;
import static ai.grakn.SNB.MODERATOR;
import static ai.grakn.SNB.ORIGINAL;
import static ai.grakn.SNB.ORIGINAL_POST;
import static ai.grakn.SNB.PARENT_MESSAGE;
import static ai.grakn.SNB.PERSON;
import static ai.grakn.SNB.PERSON_ID;
import static ai.grakn.SNB.PLACE_ID;
import static ai.grakn.SNB.POST;
import static ai.grakn.SNB.PRODUCT;
import static ai.grakn.SNB.REGION;
import static ai.grakn.SNB.REPLY;
import static ai.grakn.SNB.REPLY_OF;
import static ai.grakn.SNB.TITLE;
import static ai.grakn.SNB.by;
import static ai.grakn.SNB.has;
import static ai.grakn.SNB.key;
import static ai.grakn.SNB.resource;
import static ai.grakn.SNB.toEpoch;
import static ai.grakn.graql.Graql.var;
import static java.util.Comparator.comparing;
/**
* Implementations of the LDBC SNB short queries.
*
* @author sheldon, miko, felix
*/
public class GraknShortQueryHandlers {
private GraknShortQueryHandlers(){}
/**
 * Short Query 1 — fetch a person's profile (name, birthday, IP, browser, gender,
 * creation date) together with the id of the place the person is located in.
 * Reports {@code null} when the person id matches nothing.
 */
public static class LdbcShortQuery1PersonProfileHandler
        implements OperationHandler<LdbcShortQuery1PersonProfile, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery1PersonProfile operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // One match pulls the person plus all profile attributes and the
            // located-in relationship; findAny() takes an arbitrary single answer.
            Optional<ConceptMap> answer = graph.graql().match(
                    $person.has(PERSON_ID, operation.personId()),
                    var().rel($person).rel($firstName).isa(has(FIRST_NAME)),
                    var().rel($person).rel($lastName).isa(has(LAST_NAME)),
                    var().rel($person).rel($birthday).isa(has(BIRTHDAY)),
                    var().rel($person).rel($locationIp).isa(has(LOCATION_IP)),
                    var().rel($person).rel($browserUsed).isa(has(BROWSER_USED)),
                    var().rel($person).rel($gender).isa(has(GENDER)),
                    var().rel($person).rel($creationDate).isa(has(CREATION_DATE)),
                    var().rel(LOCATED, $person).rel(REGION, $place).isa(IS_LOCATED_IN),
                    var().rel($place).rel($placeId).isa(key(PLACE_ID))
            ).get().stream().findAny();
            if (answer.isPresent()) {
                ConceptMap fres = answer.get();
                // Unpack attribute values; dates are converted to epoch millis for LDBC.
                LdbcShortQuery1PersonProfileResult result =
                        new LdbcShortQuery1PersonProfileResult(
                                fres.get($firstName).<String>asAttribute().value(),
                                fres.get($lastName).<String>asAttribute().value(),
                                toEpoch(fres.get($birthday).<LocalDateTime>asAttribute().value()),
                                fres.get($locationIp).<String>asAttribute().value(),
                                fres.get($browserUsed).<String>asAttribute().value(),
                                fres.get($placeId).<Long>asAttribute().value(),
                                fres.get($gender).<String>asAttribute().value(),
                                toEpoch(fres.get($creationDate).<LocalDateTime>asAttribute().value()));
                resultReporter.report(0, result, operation);
            } else {
                // No such person: report an empty (null) result.
                resultReporter.report(0, null, operation);
            }
        }
    }
}
/**
 * Short Query 2 — a person's most recent messages, each joined with the original
 * post of its thread and that post's author.
 * Runs in two phases: (1) fetch the newest messages by the person, limited to
 * {@code operation.limit()}; (2) for each message, an inference-enabled query
 * resolves its original post (via the ORIGINAL_POST rule) and that post's creator.
 */
// The following requires a rule to properly work
public static class LdbcShortQuery2PersonPostsHandler implements
        OperationHandler<LdbcShortQuery2PersonPosts, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery2PersonPosts operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // Phase 1: newest messages created by the person (creation date descending).
            List<ConceptMap> messageResults = graph.graql().match(
                    $person.isa(PERSON).has(PERSON_ID, operation.personId()),
                    var().rel(CREATOR, $person).rel(PRODUCT, $message).isa(HAS_CREATOR),
                    var().rel($message).rel($date).isa(has(CREATION_DATE)),
                    var().rel($message).rel($messageId).isa(key(MESSAGE_ID))
            ).orderBy($date, Order.desc).limit(operation.limit()).get().execute();
            List<ConceptMap> allResults = new ArrayList<>();
            messageResults.forEach(a -> {
                // Phase 2 (per message, with inference on): thread original post + author.
                // A message's content lives in either CONTENT or IMAGE_FILE, hence the or().
                List<ConceptMap> results = graph.graql().infer(true).match(
                        $message.id(a.get($message).id()),
                        var().rel($message).rel($date).isa(has(CREATION_DATE)),
                        var().rel($message).rel($messageId).isa(key(MESSAGE_ID)),
                        (var().rel($message).rel($content).isa(has(CONTENT))).or(var().rel($message).rel($content).isa(has(IMAGE_FILE))),
                        $originalPost.isa(POST),
                        var().rel(CHILD_MESSAGE, $message).rel(PARENT_MESSAGE, $originalPost).isa(ORIGINAL_POST),
                        var().rel($originalPost).rel($opId).isa(key(MESSAGE_ID)),
                        $author.isa(PERSON),
                        var().rel(PRODUCT, $originalPost).rel(CREATOR, $author).isa(HAS_CREATOR),
                        var().rel($author).rel($authorId).isa(key(PERSON_ID)),
                        var().rel($author).rel($firstName).isa(has(FIRST_NAME)),
                        var().rel($author).rel($lastName).isa(has(LAST_NAME))
                ).get().execute();
                allResults.addAll(results);
            });
            // Final ordering: creation date descending, ties broken by message id descending
            // (both keys reversed together by the trailing reversed()).
            List<LdbcShortQuery2PersonPostsResult> result = allResults.stream()
                    .sorted(comparing(by($date)).thenComparing(by($messageId)).reversed())
                    .map(map -> new LdbcShortQuery2PersonPostsResult(resource(map, $messageId),
                            resource(map, $content),
                            toEpoch(resource(map, $date)),
                            resource(map, $opId),
                            resource(map, $authorId),
                            resource(map, $firstName),
                            resource(map, $lastName)))
                    .collect(Collectors.toList());
            resultReporter.report(0, result, operation);
        }
    }
}
/**
 * Short Query 3 — all friends of a person, with the friendship creation date,
 * sorted by friendship date (newest first) then by friend id (ascending).
 */
public static class LdbcShortQuery3PersonFriendsHandler implements
        OperationHandler<LdbcShortQuery3PersonFriends, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery3PersonFriends operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // KNOWS relationships carry their own CREATION_DATE attribute ($date).
            List<ConceptMap> results = graph.graql().match(
                    $person.has(PERSON_ID, operation.personId()),
                    var().rel($person).rel($friend).isa(KNOWS).has(CREATION_DATE, $date),
                    $friend.has(PERSON_ID, $friendId).has(FIRST_NAME, $firstName).has(LAST_NAME, $lastName)
            ).get().execute();
            // Sort: date descending (reversed applies to the date key only),
            // then friend id ascending.
            List<LdbcShortQuery3PersonFriendsResult> result = results.stream()
                    .sorted(comparing(by($date)).reversed().thenComparing(by($friendId)))
                    .map(map -> new LdbcShortQuery3PersonFriendsResult(
                            resource(map, $friendId),
                            resource(map, $firstName),
                            resource(map, $lastName),
                            toEpoch(resource(map, $date))
                    )).collect(Collectors.toList());
            resultReporter.report(0, result, operation);
        }
    }
}
/**
 * Short Query 4 — a message's content (text or image file name) and creation date.
 * Reports {@code null} when the message id matches nothing.
 */
public static class LdbcShortQuery4MessageContentHandler implements
        OperationHandler<LdbcShortQuery4MessageContent, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery4MessageContent operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // The message body is stored in either CONTENT or IMAGE_FILE — the or()
            // binds $content to whichever attribute is present.
            List<ConceptMap> results = graph.graql().match(
                    $message.has(MESSAGE_ID, operation.messageId()),
                    var().rel($message).rel($date).isa(has(CREATION_DATE)),
                    (var().rel($message).rel($content).isa(has(CONTENT))).or(var().rel($message).rel($content).isa(has(IMAGE_FILE)))
            ).get().execute();
            if (!results.isEmpty()) {
                // MESSAGE_ID is a key, so at most one message matches; take the first answer.
                ConceptMap fres = results.get(0);
                LdbcShortQuery4MessageContentResult result = new LdbcShortQuery4MessageContentResult(
                        resource(fres, $content),
                        toEpoch(resource(fres, $date))
                );
                resultReporter.report(0, result, operation);
            } else {
                resultReporter.report(0, null, operation);
            }
        }
    }
}
/**
 * Short Query 5 — the author (id, first and last name) of a given message.
 * Reports {@code null} when the message id matches nothing.
 */
public static class LdbcShortQuery5MessageCreatorHandler implements
        OperationHandler<LdbcShortQuery5MessageCreator, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery5MessageCreator operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // Follow the HAS_CREATOR relationship from the message to its author.
            List<ConceptMap> results = graph.graql().match(
                    $message.has(MESSAGE_ID, operation.messageId()),
                    var().rel(PRODUCT, $message).rel(CREATOR, $person).isa(HAS_CREATOR),
                    var().rel($person).rel($firstName).isa(has(FIRST_NAME)),
                    var().rel($person).rel($lastName).isa(has(LAST_NAME)),
                    var().rel($person).rel($personId).isa(key(PERSON_ID))
            ).get().execute();
            if (!results.isEmpty()) {
                // MESSAGE_ID is a key, so at most one answer is expected; take the first.
                ConceptMap fres = results.get(0);
                LdbcShortQuery5MessageCreatorResult result = new LdbcShortQuery5MessageCreatorResult(
                        resource(fres, $personId),
                        resource(fres, $firstName),
                        resource(fres, $lastName)
                );
                resultReporter.report(0, result, operation);
            } else {
                resultReporter.report(0, null, operation);
            }
        }
    }
}
/**
 * Short Query 6 — the forum containing a given message, plus that forum's moderator.
 * Requires inference: FORUM_MEMBER is derived by a rule that walks from any message
 * (including comments) up to its containing forum.
 * Reports {@code null} when the message id matches nothing.
 */
// The following requires a rule to properly work
public static class LdbcShortQuery6MessageForumHandler implements
        OperationHandler<LdbcShortQuery6MessageForum, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery6MessageForum operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // infer(true) enables rule resolution for the FORUM_MEMBER relationship.
            List<ConceptMap> results = graph.graql().infer(true).match(
                    $message.has(MESSAGE_ID, operation.messageId()),
                    var().rel(MEMBER_MESSAGE, $message).rel(GROUP_FORUM, $forum).isa(FORUM_MEMBER),
                    $forum.has(FORUM_ID, $forumId).has(TITLE, $title),
                    var().rel(MODERATED, $forum).rel(MODERATOR, $mod).isa(HAS_MODERATOR),
                    $mod.isa(PERSON).has(PERSON_ID, $modId).has(FIRST_NAME, $firstName).has(LAST_NAME, $lastName)
            ).get().execute();
            if (!results.isEmpty()) {
                ConceptMap fres = results.get(0);
                LdbcShortQuery6MessageForumResult result = new LdbcShortQuery6MessageForumResult(
                        resource(fres, $forumId),
                        resource(fres, $title),
                        resource(fres, $modId),
                        resource(fres, $firstName),
                        resource(fres, $lastName)
                );
                resultReporter.report(0, result, operation);
            } else {
                resultReporter.report(0, null, operation);
            }
        }
    }
}
/**
 * Short Query 7 — all direct replies to a message, each with its author and a flag
 * saying whether that author knows the original message's author.
 * Results are sorted by reply creation date (newest first), then author id (ascending).
 */
public static class LdbcShortQuery7MessageRepliesHandler implements
        OperationHandler<LdbcShortQuery7MessageReplies, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcShortQuery7MessageReplies operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter resultReporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.READ)) {
            // $author1: creator of the queried message; $author2: creator of each reply.
            List<ConceptMap> results = graph.graql().match(
                    $message.isa(MESSAGE).has(MESSAGE_ID, operation.messageId()),
                    var().rel(PRODUCT, $message).rel(CREATOR, $author1).isa(HAS_CREATOR),
                    var().rel(ORIGINAL, $message).rel(REPLY, $comment).isa(REPLY_OF),
                    var().rel($comment).rel($commentId).isa(key(MESSAGE_ID)),
                    var().rel($comment).rel($content).isa(has(CONTENT)),
                    var().rel($comment).rel($date).isa(has(CREATION_DATE)),
                    var().rel(PRODUCT, $comment).rel(CREATOR, $author2).isa(HAS_CREATOR),
                    var().rel($author2).rel($personId).isa(key(PERSON_ID)),
                    var().rel($author2).rel($firstName).isa(has(FIRST_NAME)),
                    var().rel($author2).rel($lastName).isa(has(LAST_NAME))
            ).get().execute();
            // Sort: date descending, then person id ascending; the friendship flag
            // is resolved per reply with a separate lookup (see checkIfFriends).
            List<LdbcShortQuery7MessageRepliesResult> result = results.stream()
                    .sorted(comparing(by($date)).reversed().thenComparing(by($personId)))
                    .map(map -> new LdbcShortQuery7MessageRepliesResult(
                            resource(map, $commentId),
                            resource(map, $content),
                            toEpoch(resource(map, $date)),
                            resource(map, $personId),
                            resource(map, $firstName),
                            resource(map, $lastName),
                            checkIfFriends(conceptId(map, $author1), conceptId(map, $author2), graph)))
                    .collect(Collectors.toList());
            resultReporter.report(0, result, operation);
        }
    }
    /** Returns true when a KNOWS relationship exists between the two given persons. */
    private boolean checkIfFriends(ConceptId author1, ConceptId author2, GraknTx graph) {
        return graph.graql().match(
                var().rel(FRIEND, var().id(author1)).rel(FRIEND, var().id(author2)).isa(KNOWS)
        ).stream().findAny().isPresent();
    }
    /** Extracts the concept id bound to {@code var} in a query answer. */
    private ConceptId conceptId(ConceptMap result, Var var) {
        return result.get(var).id();
    }
}
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/GraknUpdateQueryHandlers.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import ai.grakn.graql.Var;
import ai.grakn.graql.VarPattern;
import com.google.common.collect.ImmutableSet;
import com.ldbc.driver.DbException;
import com.ldbc.driver.OperationHandler;
import com.ldbc.driver.ResultReporter;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcNoResult;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate1AddPerson;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate2AddPostLike;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate3AddCommentLike;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate4AddForum;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate5AddForumMembership;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate6AddPost;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate7AddComment;
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate8AddFriendship;
import static ai.grakn.SNB.$author;
import static ai.grakn.SNB.$city;
import static ai.grakn.SNB.$comment;
import static ai.grakn.SNB.$country;
import static ai.grakn.SNB.$forum;
import static ai.grakn.SNB.$message;
import static ai.grakn.SNB.$mod;
import static ai.grakn.SNB.$original;
import static ai.grakn.SNB.$person;
import static ai.grakn.SNB.$post;
import static ai.grakn.SNB.ADMIRER;
import static ai.grakn.SNB.BIRTHDAY;
import static ai.grakn.SNB.BROWSER_USED;
import static ai.grakn.SNB.CLASS_YEAR;
import static ai.grakn.SNB.COMMENT;
import static ai.grakn.SNB.COMPANY;
import static ai.grakn.SNB.CONTAINED;
import static ai.grakn.SNB.CONTAINER;
import static ai.grakn.SNB.CONTAINER_OF;
import static ai.grakn.SNB.CONTENT;
import static ai.grakn.SNB.CREATION_DATE;
import static ai.grakn.SNB.CREATOR;
import static ai.grakn.SNB.EMAIL;
import static ai.grakn.SNB.EMPLOYEE;
import static ai.grakn.SNB.EMPLOYER;
import static ai.grakn.SNB.FIRST_NAME;
import static ai.grakn.SNB.FORUM;
import static ai.grakn.SNB.FORUM_ID;
import static ai.grakn.SNB.FRIEND;
import static ai.grakn.SNB.GENDER;
import static ai.grakn.SNB.GROUP;
import static ai.grakn.SNB.HAS_CREATOR;
import static ai.grakn.SNB.HAS_INTEREST;
import static ai.grakn.SNB.HAS_MEMBER;
import static ai.grakn.SNB.HAS_MODERATOR;
import static ai.grakn.SNB.HAS_TAG;
import static ai.grakn.SNB.IMAGE_FILE;
import static ai.grakn.SNB.INTEREST;
import static ai.grakn.SNB.INTERESTED;
import static ai.grakn.SNB.IS_LOCATED_IN;
import static ai.grakn.SNB.JOIN_DATE;
import static ai.grakn.SNB.KNOWS;
import static ai.grakn.SNB.LANGUAGE;
import static ai.grakn.SNB.LAST_NAME;
import static ai.grakn.SNB.LENGTH;
import static ai.grakn.SNB.LIKE;
import static ai.grakn.SNB.LIKES;
import static ai.grakn.SNB.LOCATED;
import static ai.grakn.SNB.LOCATION_IP;
import static ai.grakn.SNB.MEMBER;
import static ai.grakn.SNB.MESSAGE_ID;
import static ai.grakn.SNB.MODERATED;
import static ai.grakn.SNB.MODERATOR;
import static ai.grakn.SNB.ORGANISATION_ID;
import static ai.grakn.SNB.ORIGINAL;
import static ai.grakn.SNB.PERSON;
import static ai.grakn.SNB.PERSON_ID;
import static ai.grakn.SNB.PLACE_ID;
import static ai.grakn.SNB.POST;
import static ai.grakn.SNB.PRODUCT;
import static ai.grakn.SNB.REGION;
import static ai.grakn.SNB.REPLY;
import static ai.grakn.SNB.REPLY_OF;
import static ai.grakn.SNB.SCHOOL;
import static ai.grakn.SNB.SPEAKS;
import static ai.grakn.SNB.STUDENT;
import static ai.grakn.SNB.STUDY_AT;
import static ai.grakn.SNB.TAG;
import static ai.grakn.SNB.TAGGED;
import static ai.grakn.SNB.TAG_ID;
import static ai.grakn.SNB.TITLE;
import static ai.grakn.SNB.TOPIC;
import static ai.grakn.SNB.UNIVERSITY;
import static ai.grakn.SNB.WORK_AT;
import static ai.grakn.SNB.WORK_FROM;
import static ai.grakn.SNB.fromDate;
import static ai.grakn.graql.Graql.var;
/**
* Implementations of the LDBC SNB Update Queries
*
* @author sheldon, miko
*/
public class GraknUpdateQueryHandlers {
private GraknUpdateQueryHandlers(){}
/**
 * Update Query 1 — insert a new person with all profile attributes, plus
 * relationships to their city, interest tags, universities (study-at) and
 * companies (work-at). Built as one match-insert: the match part binds every
 * pre-existing concept (city, tags, organisations), the insert part creates
 * the person and the relationships in a single transaction.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate1AddPersonHandler implements OperationHandler<LdbcUpdate1AddPerson, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate1AddPerson operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.WRITE)) {
            ImmutableSet.Builder<VarPattern> match = ImmutableSet.builder();
            ImmutableSet.Builder<VarPattern> insert = ImmutableSet.builder();
            match.add($city.has(PLACE_ID, operation.cityId()));
            // One HAS_INTEREST relationship per tag id; each tag gets a distinct
            // query variable named after its id so the patterns do not collide.
            for (Long theTag : operation.tagIds()) {
                Var tag = var(theTag.toString());
                match.add(tag.isa(TAG).has(TAG_ID, theTag));
                insert.add(var().rel(INTERESTED, $person).rel(INTEREST, tag).isa(HAS_INTEREST));
            }
            // STUDY_AT relationships, each annotated with the class year.
            for (LdbcUpdate1AddPerson.Organization org : operation.studyAt()) {
                Var organisation = var(Long.toString(org.organizationId()));
                match.add(organisation.isa(UNIVERSITY).has(ORGANISATION_ID, org.organizationId()));
                insert.add(var().rel(STUDENT, $person).rel(SCHOOL, organisation).isa(STUDY_AT).has(CLASS_YEAR, org.year()));
            }
            // WORK_AT relationships, each annotated with the start year.
            for (LdbcUpdate1AddPerson.Organization org : operation.workAt()) {
                Var organisation = var(Long.toString(org.organizationId()));
                match.add(organisation.isa(COMPANY).has(ORGANISATION_ID, org.organizationId()));
                insert.add(var().rel(EMPLOYEE, $person).rel(EMPLOYER, organisation).isa(WORK_AT).has(WORK_FROM, org.year()));
            }
            // The person entity itself with its scalar attributes.
            insert.add($person.isa(PERSON)
                    .has(PERSON_ID, operation.personId())
                    .has(FIRST_NAME, operation.personFirstName())
                    .has(LAST_NAME, operation.personLastName())
                    .has(BIRTHDAY, fromDate(operation.birthday()))
                    .has(CREATION_DATE, fromDate(operation.creationDate()))
                    .has(LOCATION_IP, operation.locationIp())
                    .has(BROWSER_USED, operation.browserUsed())
                    .has(GENDER, operation.gender()));
            // Multi-valued attributes: one has-pattern per language / email.
            for (String language : operation.languages()) {
                insert.add($person.has(SPEAKS, language));
            }
            for (String theEmail : operation.emails()) {
                insert.add($person.has(EMAIL, theEmail));
            }
            insert.add(var().rel(LOCATED, $person).rel(REGION, $city).isa(IS_LOCATED_IN));
            graph.graql().match(match.build()).insert(insert.build()).execute();
            graph.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 2 — record that a person likes a post, stamped with the
 * like's creation date. Commits a single LIKES relationship insertion.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate2AddPostLikeHandler implements OperationHandler<LdbcUpdate2AddPostLike, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate2AddPostLike operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx tx = session.transaction(GraknTxType.WRITE)) {
            // Bind the liker and the liked post, then insert the dated relationship.
            VarPattern like = var()
                    .rel(ADMIRER, $person).rel(LIKE, $message).isa(LIKES)
                    .has(CREATION_DATE, fromDate(operation.creationDate()));
            tx.graql().match(
                    $person.has(PERSON_ID, operation.personId()),
                    $message.has(MESSAGE_ID, operation.postId())
            ).insert(like).execute();
            tx.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 3 — record that a person likes a comment, stamped with the
 * like's creation date. Structurally identical to Update 2 but keyed on the
 * comment id.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate3AddCommentLikeHandler implements OperationHandler<LdbcUpdate3AddCommentLike, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate3AddCommentLike operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx tx = session.transaction(GraknTxType.WRITE)) {
            // Bind the liker and the liked comment, then insert the dated relationship.
            VarPattern like = var()
                    .rel(ADMIRER, $person).rel(LIKE, $message).isa(LIKES)
                    .has(CREATION_DATE, fromDate(operation.creationDate()));
            tx.graql().match(
                    $person.has(PERSON_ID, operation.personId()),
                    $message.has(MESSAGE_ID, operation.commentId())
            ).insert(like).execute();
            tx.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 4 — insert a new forum with its title and creation date, tag it
 * with the given tags, and attach its moderator. The match part binds the
 * moderator person and every tag; the insert part creates the forum and the
 * relationships in one transaction.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate4AddForumHandler implements OperationHandler<LdbcUpdate4AddForum, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate4AddForum operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.WRITE)) {
            ImmutableSet.Builder<VarPattern> match = ImmutableSet.builder();
            ImmutableSet.Builder<VarPattern> insert = ImmutableSet.builder();
            match.add($mod.has(PERSON_ID, operation.moderatorPersonId()));
            // One HAS_TAG relationship per tag; each tag uses a variable named
            // after its id to keep patterns distinct.
            for (long tagId : operation.tagIds()) {
                Var tag = var(Long.toString(tagId));
                match.add(tag.has(TAG_ID, tagId));
                insert.add(var().rel(TAGGED, $forum).rel(TOPIC, tag).isa(HAS_TAG));
            }
            insert.add($forum.isa(FORUM)
                    .has(FORUM_ID, operation.forumId())
                    .has(TITLE, operation.forumTitle())
                    .has(CREATION_DATE, fromDate(operation.creationDate()))
            );
            insert.add(var().rel(MODERATOR, $mod).rel(MODERATED, $forum).isa(HAS_MODERATOR));
            graph.graql().match(match.build()).insert(insert.build()).execute();
            graph.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 5 — add a person as a member of a forum, recording the join date
 * on the HAS_MEMBER relationship.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate5AddForumMembershipHandler implements OperationHandler<LdbcUpdate5AddForumMembership, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate5AddForumMembership operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx tx = session.transaction(GraknTxType.WRITE)) {
            // Bind the forum and the joining person, then insert the dated membership.
            VarPattern membership = var()
                    .rel(MEMBER, $person).rel(GROUP, $forum).isa(HAS_MEMBER)
                    .has(JOIN_DATE, fromDate(operation.joinDate()));
            tx.graql().match(
                    $forum.has(FORUM_ID, operation.forumId()),
                    $person.has(PERSON_ID, operation.personId())
            ).insert(membership).execute();
            tx.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 6 — insert a new post into a forum. Binds the author, the
 * containing forum and the country in the match part, then inserts the post,
 * its attributes (language/image-file/content are conditional), its tags, and
 * the creator/located-in/container-of relationships in one transaction.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate6AddPostHandler implements OperationHandler<LdbcUpdate6AddPost, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate6AddPost operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.WRITE)) {
            ImmutableSet.Builder<VarPattern> match = ImmutableSet.builder();
            ImmutableSet.Builder<VarPattern> insert = ImmutableSet.builder();
            match.add(
                    $author.has(PERSON_ID, operation.authorPersonId()),
                    $forum.has(FORUM_ID, operation.forumId()),
                    $country.has(PLACE_ID, operation.countryId())
            );
            // One HAS_TAG relationship per tag id.
            for (long tagId : operation.tagIds()) {
                Var tag = var(Long.toString(tagId));
                match.add(tag.has(TAG_ID, tagId));
                insert.add(var().rel(TAGGED, $post).rel(TOPIC, tag).isa(HAS_TAG));
            }
            insert.add($post.isa(POST).has(MESSAGE_ID, operation.postId())
                    .has(LOCATION_IP, operation.locationIp())
                    .has(BROWSER_USED, operation.browserUsed())
                    .has(LENGTH, operation.length())
                    .has(CREATION_DATE, fromDate(operation.creationDate())));
            // Optional attributes: LDBC encodes "absent" as the empty string.
            if (operation.language().length() > 0) {
                insert.add($post.has(LANGUAGE, operation.language()));
            }
            // A post carries either an image file OR textual content, never both.
            if (operation.imageFile().length() > 0) {
                insert.add($post.has(IMAGE_FILE, operation.imageFile()));
            } else {
                insert.add($post.has(CONTENT, operation.content()));
            }
            insert.add(
                    var().rel(PRODUCT, $post).rel(CREATOR, $author).isa(HAS_CREATOR),
                    var().rel(LOCATED, $post).rel(REGION, $country).isa(IS_LOCATED_IN),
                    var().rel(CONTAINED, $post).rel(CONTAINER, $forum).isa(CONTAINER_OF)
            );
            graph.graql().match(match.build()).insert(insert.build()).execute();
            graph.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 7 — insert a new comment replying to either a post or another
 * comment. Binds the author, the message being replied to, and the country in
 * the match part, then inserts the comment, its tags, and the
 * creator/located-in/reply-of relationships in one transaction.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate7AddCommentHandler implements OperationHandler<LdbcUpdate7AddComment, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate7AddComment operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx graph = session.transaction(GraknTxType.WRITE)) {
            ImmutableSet.Builder<VarPattern> match = ImmutableSet.builder();
            ImmutableSet.Builder<VarPattern> insert = ImmutableSet.builder();
            match.add($author.has(PERSON_ID, operation.authorPersonId()));
            // LDBC encodes "not replying to a post" as replyToPostId == -1,
            // in which case the reply target is a comment instead.
            if (operation.replyToPostId() != -1) {
                match.add($original.has(MESSAGE_ID, operation.replyToPostId()));
            } else {
                match.add($original.has(MESSAGE_ID, operation.replyToCommentId()));
            }
            match.add($country.has(PLACE_ID, operation.countryId()));
            // One HAS_TAG relationship per tag id.
            for (long tagId : operation.tagIds()) {
                Var tag = var(Long.toString(tagId));
                match.add(tag.has(TAG_ID, tagId));
                insert.add(var().rel(TAGGED, $comment).rel(TOPIC, tag).isa(HAS_TAG));
            }
            insert.add(
                    $comment.isa(COMMENT).has(MESSAGE_ID, operation.commentId())
                            .has(CONTENT, operation.content())
                            .has(LOCATION_IP, operation.locationIp())
                            .has(BROWSER_USED, operation.browserUsed())
                            .has(CREATION_DATE, fromDate(operation.creationDate()))
                            .has(LENGTH, operation.length()),
                    var().rel(PRODUCT, $comment).rel(CREATOR, $author).isa(HAS_CREATOR),
                    var().rel(LOCATED, $comment).rel(REGION, $country).isa(IS_LOCATED_IN),
                    var().rel(REPLY, $comment).rel(ORIGINAL, $original).isa(REPLY_OF)
            );
            graph.graql().match(match.build()).insert(insert.build()).execute();
            graph.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
/**
 * Update Query 8 — create a KNOWS friendship between two persons, stamped with
 * the friendship's creation date.
 */
@SuppressWarnings("unused") //Called through SNB validation
public static class LdbcUpdate8AddFriendshipHandler implements OperationHandler<LdbcUpdate8AddFriendship, GraknDbConnectionState> {
    @Override
    public void executeOperation(LdbcUpdate8AddFriendship operation,
                                 GraknDbConnectionState dbConnectionState,
                                 ResultReporter reporter) throws DbException {
        GraknSession session = dbConnectionState.session();
        try (GraknTx tx = session.transaction(GraknTxType.WRITE)) {
            // Distinct variables for the two ends of the (symmetric) friendship.
            Var firstPerson = var("person1");
            Var secondPerson = var("person2");
            VarPattern friendship = var()
                    .rel(FRIEND, firstPerson).rel(FRIEND, secondPerson).isa(KNOWS)
                    .has(CREATION_DATE, fromDate(operation.creationDate()));
            tx.graql().match(
                    firstPerson.has(PERSON_ID, operation.person1Id()),
                    secondPerson.has(PERSON_ID, operation.person2Id())
            ).insert(friendship).execute();
            tx.commit();
            reporter.report(0, LdbcNoResult.INSTANCE, operation);
        }
    }
}
}
|
0
|
java-sources/ai/grakn/test-snb/1.4.3/ai
|
java-sources/ai/grakn/test-snb/1.4.3/ai/grakn/SNB.java
|
/*
* GRAKN.AI - THE KNOWLEDGE GRAPH
* Copyright (C) 2018 Grakn Labs Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package ai.grakn;
import ai.grakn.graql.Var;
import ai.grakn.graql.VarPattern;
import ai.grakn.graql.answer.ConceptMap;
import ai.grakn.util.Schema;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.Date;
import java.util.function.Function;
import static ai.grakn.graql.Graql.label;
import static ai.grakn.graql.Graql.var;
/**
* @author Felix Chapman
*/
public class SNB {
    // ---- Attribute (resource) type labels of the SNB schema ----
    static final String PLACE_ID = "place-id";
    static final String TAG_ID = "tag-id";
    static final String ORGANISATION_ID = "organisation-id";
    static final String CLASS_YEAR = "class-year";
    static final String WORK_FROM = "work-from";
    static final String PERSON_ID = "person-id";
    static final String MESSAGE_ID = "message-id";
    static final String FORUM_ID = "forum-id";
    static final String FIRST_NAME = "first-name";
    static final String LAST_NAME = "last-name";
    static final String BIRTHDAY = "birth-day";
    static final String CREATION_DATE = "creation-date";
    static final String LOCATION_IP = "location-ip";
    static final String BROWSER_USED = "browser-used";
    static final String GENDER = "gender";
    static final String SPEAKS = "speaks";
    static final String EMAIL = "email";
    static final String TITLE = "title";
    static final String JOIN_DATE = "join-date";
    static final String LENGTH = "length";
    static final String LANGUAGE = "language";
    static final String IMAGE_FILE = "image-file";
    static final String CONTENT = "content";
    static final String NAME = "name";
    // ---- Entity, role and relationship type labels, pre-wrapped as label patterns ----
    static final VarPattern FORUM = label("forum");
    static final VarPattern TAG = label("tag");
    static final VarPattern UNIVERSITY = label("university");
    static final VarPattern COMPANY = label("company");
    static final VarPattern PERSON = label("person");
    static final VarPattern INTERESTED = label("interested");
    static final VarPattern INTEREST = label("interest");
    static final VarPattern HAS_INTEREST = label("has-interest");
    static final VarPattern STUDENT = label("student");
    static final VarPattern SCHOOL = label("school");
    static final VarPattern STUDY_AT = label("study-at");
    static final VarPattern EMPLOYEE = label("employee");
    static final VarPattern EMPLOYER = label("employer");
    static final VarPattern WORK_AT = label("work-at");
    static final VarPattern LOCATED = label("located");
    static final VarPattern REGION = label("region");
    static final VarPattern IS_LOCATED_IN = label("is-located-in");
    static final VarPattern ADMIRER = label("admirer");
    static final VarPattern LIKE = label("like");
    static final VarPattern LIKES = label("likes");
    static final VarPattern TAGGED = label("tagged");
    static final VarPattern TOPIC = label("topic");
    static final VarPattern HAS_TAG = label("has-tag");
    static final VarPattern MODERATOR = label("moderator");
    static final VarPattern MODERATED = label("moderated");
    static final VarPattern HAS_MODERATOR = label("has-moderator");
    static final VarPattern MEMBER = label("member");
    static final VarPattern GROUP = label("group");
    static final VarPattern HAS_MEMBER = label("has-member");
    static final VarPattern POST = label("post");
    static final VarPattern PRODUCT = label("product");
    static final VarPattern CREATOR = label("creator");
    static final VarPattern HAS_CREATOR = label("has-creator");
    static final VarPattern CONTAINED = label("contained");
    static final VarPattern CONTAINER = label("container");
    static final VarPattern CONTAINER_OF = label("container-of");
    static final VarPattern COMMENT = label("comment");
    static final VarPattern REPLY = label("reply");
    static final VarPattern ORIGINAL = label("original");
    static final VarPattern REPLY_OF = label("reply-of");
    static final VarPattern FRIEND = label("friend");
    static final VarPattern KNOWS = label("knows");
    static final VarPattern CHILD_MESSAGE = label("child-message");
    static final VarPattern PARENT_MESSAGE = label("parent-message");
    static final VarPattern ORIGINAL_POST = label("original-post");
    static final VarPattern MEMBER_MESSAGE = label("member-message");
    static final VarPattern GROUP_FORUM = label("group-forum");
    static final VarPattern FORUM_MEMBER = label("forum-member");
    static final VarPattern MESSAGE = label("message");
    // ---- Shared query variables, so patterns built in different handlers line up ----
    static final Var $person = var("person");
    static final Var $city = var("city");
    static final Var $message = var("message");
    static final Var $mod = var("mod");
    static final Var $modId = var("modId");
    static final Var $forum = var("forum");
    static final Var $author = var("author");
    static final Var $author1 = var("author1");
    static final Var $author2 = var("author2");
    static final Var $country = var("country");
    static final Var $post = var("post");
    static final Var $original = var("original");
    static final Var $comment = var("comment");
    static final Var $commentId = var("commentId");
    static final Var $firstName = var(FIRST_NAME);
    static final Var $lastName = var(LAST_NAME);
    static final Var $birthday = var("birthday");
    static final Var $locationIp = var(LOCATION_IP);
    static final Var $browserUsed = var(BROWSER_USED);
    static final Var $gender = var(GENDER);
    static final Var $creationDate = var(CREATION_DATE);
    static final Var $place = var("place");
    static final Var $placeId = var("placeID");
    static final Var $date = var("date");
    static final Var $messageId = var("messageId");
    static final Var $content = var(CONTENT);
    static final Var $originalPost = var("originalPost");
    static final Var $opId = var("opId");
    static final Var $authorId = var("authorId");
    static final Var $friend = var("friend");
    static final Var $friendId = var("friendId");
    static final Var $personId = var("personId");
    static final Var $forumId = var("forumId");
    static final Var $title = var(TITLE);
    // Utility class: constants and static helpers only, never instantiated.
    private SNB(){}
    /** Label pattern for the implicit "has" relationship type Grakn generates for an attribute. */
    static VarPattern has(String label) {
        return label(Schema.ImplicitType.HAS.getLabel(label));
    }
    /** Label pattern for the implicit "key" relationship type Grakn generates for an attribute. */
    static VarPattern key(String label) {
        return label(Schema.ImplicitType.KEY.getLabel(label));
    }
    /** Converts a legacy {@link Date} to a {@link LocalDateTime}, interpreting the instant in UTC. */
    static LocalDateTime fromDate(Date date) {
        return LocalDateTime.ofInstant(Instant.ofEpochMilli(date.getTime()), ZoneOffset.UTC);
    }
    /** Inverse of {@link #fromDate(Date)}: epoch milliseconds of the date-time, treated as UTC. */
    static long toEpoch(LocalDateTime localDateTime) {
        return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
    }
    /** Returns a function that extracts the value bound to {@code var} from a query answer. */
    static <T> Function<ConceptMap, T> by(Var var) {
        return map -> resource(map, var);
    }
    /**
     * Value of the attribute bound to {@code var} in {@code result}.
     * Unchecked: the caller asserts the attribute's runtime value type is {@code T}.
     */
    static <T> T resource(ConceptMap result, Var var) {
        return result.get(var).<T>asAttribute().value();
    }
}
|
0
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/factory/TitanHadoopInternalFactory.java
|
/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*/
package ai.grakn.factory;
import ai.grakn.graph.internal.AbstractGraknGraph;
import ai.grakn.util.ErrorMessage;
import org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph;
import org.apache.tinkerpop.gremlin.structure.util.GraphFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Properties;
/**
* <p>
* A Grakn Graph on top of {@link HadoopGraph}
* </p>
*
* <p>
* This produces a graph on top of {@link HadoopGraph}.
* The base construction process defined by {@link AbstractInternalFactory} ensures the graph factories are singletons.
* With this vendor some exceptions are in places:
* 1. The Grakn API cannnot work on {@link HadoopGraph} this is due to not being able to directly write to a
* {@link HadoopGraph}.
* 2. This factory primarily exists as a means of producing a
* {@link org.apache.tinkerpop.gremlin.process.computer.GraphComputer} on of {@link HadoopGraph}
* </p>
*
* @author fppt
*/
public class TitanHadoopInternalFactory extends AbstractInternalFactory<AbstractGraknGraph<HadoopGraph>, HadoopGraph> {
    private static final String CLUSTER_KEYSPACE = "titanmr.ioformat.conf.storage.cassandra.keyspace";
    private static final String INPUT_KEYSPACE = "cassandra.input.keyspace";
    // Loggers are stateless; the conventional SLF4J idiom is a single static final instance
    // per class rather than one field per factory instance.
    private static final Logger LOG = LoggerFactory.getLogger(TitanHadoopInternalFactory.class);

    /**
     * @param keyspace   the Cassandra keyspace this factory reads from; also copied into the
     *                   Hadoop input/output configuration properties
     * @param engineUrl  engine address (ignored by Hadoop graphs — see buildTinkerPopGraph)
     * @param properties backend configuration, mutated here to point at {@code keyspace}
     */
    TitanHadoopInternalFactory(String keyspace, String engineUrl, Properties properties) {
        super(keyspace, engineUrl, properties);
        properties.setProperty(CLUSTER_KEYSPACE, keyspace);
        properties.setProperty(INPUT_KEYSPACE, keyspace);
    }

    /**
     * Always throws: a Grakn graph cannot be layered on {@link HadoopGraph} because it
     * cannot be written to directly.
     */
    @Override
    AbstractGraknGraph<HadoopGraph> buildGraknGraphFromTinker(HadoopGraph graph) {
        throw new UnsupportedOperationException(ErrorMessage.CANNOT_PRODUCE_GRAPH.getMessage(HadoopGraph.class.getName()));
    }

    /** Opens the Hadoop graph purely from {@code properties}; the engine URL is not used. */
    @Override
    HadoopGraph buildTinkerPopGraph(boolean batchLoading) {
        // Parameterized logging avoids string concatenation when WARN is disabled.
        LOG.warn("Hadoop graph ignores parameter address [{}]", super.engineUrl);
        return (HadoopGraph) GraphFactory.open(properties);
    }

    //TODO: Get rid of the need for batch loading parameter
    /** Hadoop graphs have no transactions to open, so the graph is returned unchanged. */
    @Override
    protected HadoopGraph getGraphWithNewTransaction(HadoopGraph graph, boolean batchloading) {
        return graph;
    }
}
|
0
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/factory/TitanInternalFactory.java
|
/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*/
package ai.grakn.factory;
import ai.grakn.graph.internal.GraknTitanGraph;
import ai.grakn.util.ErrorMessage;
import ai.grakn.util.Schema;
import com.thinkaurelius.titan.core.EdgeLabel;
import com.thinkaurelius.titan.core.Namifiable;
import com.thinkaurelius.titan.core.PropertyKey;
import com.thinkaurelius.titan.core.RelationType;
import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.VertexLabel;
import com.thinkaurelius.titan.core.schema.TitanIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.thinkaurelius.titan.graphdb.database.StandardTitanGraph;
import com.thinkaurelius.titan.graphdb.transaction.StandardTitanTx;
import org.apache.tinkerpop.gremlin.process.traversal.Order;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Transaction;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import static java.util.Arrays.stream;
/**
* <p>
* A Grakn Graph on top of {@link TitanGraph}
* </p>
*
* <p>
* This produces a grakn graph on top of {@link TitanGraph}.
* The base construction process defined by {@link AbstractInternalFactory} ensures the graph factories are singletons.
* </p>
*
* @author fppt
*/
// Modifier order follows JLS convention: `public final` rather than `final public`.
public final class TitanInternalFactory extends AbstractInternalFactory<GraknTitanGraph, TitanGraph> {
    private static final String DEFAULT_CONFIG = "backend-default";
    // Traversal strategies must be registered against Titan's global cache exactly once per JVM.
    private static final AtomicBoolean strategiesApplied = new AtomicBoolean(false);

    TitanInternalFactory(String keyspace, String engineUrl, Properties properties) {
        super(keyspace, engineUrl, properties);
    }

    /**
     * Returns a graph guaranteed to have an open transaction, rebuilding the underlying
     * graph first if it has been closed.
     */
    @Override
    public TitanGraph getGraphWithNewTransaction(TitanGraph graph, boolean batchloading){
        if(graph.isClosed()) graph = buildTinkerPopGraph(batchloading);
        if(!graph.tx().isOpen()){
            graph.tx().open();
        }
        return graph;
    }

    @Override
    GraknTitanGraph buildGraknGraphFromTinker(TitanGraph graph) {
        return new GraknTitanGraph(graph, super.keyspace, super.engineUrl, super.properties);
    }

    @Override
    TitanGraph buildTinkerPopGraph(boolean batchLoading) {
        return newTitanGraph(super.keyspace, super.engineUrl, super.properties, batchLoading);
    }

    /**
     * Opens a configured Titan graph, ensures the Grakn indexes exist, and (once per JVM)
     * installs the {@link TitanPreviousPropertyStepStrategy} traversal optimisation.
     */
    private synchronized TitanGraph newTitanGraph(String name, String address, Properties properties, boolean batchLoading){
        TitanGraph titanGraph = configureGraph(name, address, properties, batchLoading);
        buildTitanIndexes(titanGraph);
        // Uncommitted work is rolled back rather than committed when a transaction closes.
        titanGraph.tx().onClose(Transaction.CLOSE_BEHAVIOR.ROLLBACK);
        if (!strategiesApplied.getAndSet(true)) {
            TraversalStrategies strategies = TraversalStrategies.GlobalCache.getStrategies(StandardTitanGraph.class);
            strategies = strategies.clone().addStrategies(new TitanPreviousPropertyStepStrategy());
            // Register for both the graph and transaction classes so the strategy applies
            // to traversals started from either.
            TraversalStrategies.GlobalCache.registerStrategies(StandardTitanGraph.class, strategies);
            TraversalStrategies.GlobalCache.registerStrategies(StandardTitanTx.class, strategies);
        }
        return titanGraph;
    }

    /**
     * Builds a {@link TitanGraph} from the supplied properties, falling back to the bundled
     * default configuration when {@code properties} is null.
     */
    private TitanGraph configureGraph(String name, String address, Properties properties, boolean batchLoading){
        //Load default properties if none provided
        if(properties == null){
            properties = new Properties();
            // try-with-resources closes the stream; no explicit close() needed.
            try (InputStream in = getClass().getResourceAsStream(DEFAULT_CONFIG)) {
                properties.load(in);
            } catch (IOException e) {
                throw new RuntimeException(ErrorMessage.INVALID_PATH_TO_CONFIG.getMessage(DEFAULT_CONFIG), e);
            }
        }
        TitanFactory.Builder builder = TitanFactory.build().
                set("storage.hostname", address).
                set("storage.cassandra.keyspace", name).
                set("storage.batch-loading", batchLoading);
        // Any explicitly supplied properties override the hard-coded defaults above.
        properties.forEach((key, value) -> builder.set(key.toString(), value));
        return builder.open();
    }

    /** Creates all labels, property keys and indexes Grakn requires, in dependency order. */
    private static void buildTitanIndexes(TitanGraph graph) {
        TitanManagement management = graph.openManagement();
        makeVertexLabels(management);
        makeEdgeLabels(management);
        makePropertyKeys(management);
        makeIndicesVertexCentric(management);
        makeIndicesComposite(management);
        management.commit();
    }

    /** Idempotently creates an edge label for every {@link Schema.EdgeLabel}. */
    private static void makeEdgeLabels(TitanManagement management){
        for (Schema.EdgeLabel edgeLabel : Schema.EdgeLabel.values()) {
            EdgeLabel label = management.getEdgeLabel(edgeLabel.getLabel());
            if(label == null) {
                management.makeEdgeLabel(edgeLabel.getLabel()).make();
            }
        }
    }

    /** Idempotently creates a vertex label for every {@link Schema.BaseType}. */
    private static void makeVertexLabels(TitanManagement management){
        for (Schema.BaseType baseType : Schema.BaseType.values()) {
            VertexLabel foundLabel = management.getVertexLabel(baseType.name());
            if(foundLabel == null) {
                management.makeVertexLabel(baseType.name()).make();
            }
        }
    }

    /**
     * Creates vertex-centric (edge) indexes described by the {@code indices-edges} resource
     * bundle: one index per property key and one combined index over all keys of an edge label.
     */
    private static void makeIndicesVertexCentric(TitanManagement management){
        ResourceBundle keys = ResourceBundle.getBundle("indices-edges");
        Set<String> edgeLabels = keys.keySet();
        for(String edgeLabel : edgeLabels){
            String[] propertyKeyStrings = keys.getString(edgeLabel).split(",");
            //Get all the property keys we need
            Set<PropertyKey> propertyKeys = stream(propertyKeyStrings).map(keyId ->{
                PropertyKey key = management.getPropertyKey(keyId);
                if (key == null) {
                    throw new RuntimeException("Trying to create edge index on label [" + edgeLabel + "] but the property [" + keyId + "] does not exist");
                }
                return key;
            }).collect(Collectors.toSet());
            //Get the edge and indexing information
            RelationType relationType = management.getRelationType(edgeLabel);
            EdgeLabel label = management.getEdgeLabel(edgeLabel);
            //Create index on each property key
            for (PropertyKey key : propertyKeys) {
                if (management.getRelationIndex(relationType, edgeLabel + "by" + key.name()) == null) {
                    management.buildEdgeIndex(label, edgeLabel + "by" + key.name(), Direction.BOTH, Order.decr, key);
                }
            }
            //Create index on all property keys
            String propertyKeyId = propertyKeys.stream().map(Namifiable::name).collect(Collectors.joining("_"));
            if (management.getRelationIndex(relationType, edgeLabel + "by" + propertyKeyId) == null) {
                // toArray(new PropertyKey[0]) is the idiomatic (and on modern JVMs faster) form.
                PropertyKey[] allKeys = propertyKeys.toArray(new PropertyKey[0]);
                management.buildEdgeIndex(label, edgeLabel + "by" + propertyKeyId, Direction.BOTH, Order.decr, allKeys);
            }
        }
    }

    /** Idempotently creates a property key for every vertex and edge property in the schema. */
    private static void makePropertyKeys(TitanManagement management){
        stream(Schema.VertexProperty.values()).forEach(property ->
                makePropertyKey(management, property.name(), property.getDataType()));
        stream(Schema.EdgeProperty.values()).forEach(property ->
                makePropertyKey(management, property.name(), property.getDataType()));
    }

    private static void makePropertyKey(TitanManagement management, String propertyKey, Class type){
        if (management.getPropertyKey(propertyKey) == null) {
            management.makePropertyKey(propertyKey).dataType(type).make();
        }
    }

    /**
     * Creates graph-wide composite indexes described by the {@code indices-composite} resource
     * bundle; the bundle value for each key is a boolean indicating uniqueness.
     */
    private static void makeIndicesComposite(TitanManagement management){
        ResourceBundle keys = ResourceBundle.getBundle("indices-composite");
        Set<String> keyString = keys.keySet();
        for(String propertyKeyLabel : keyString){
            String indexLabel = "by" + propertyKeyLabel;
            TitanIndex index = management.getGraphIndex(indexLabel);
            if(index == null) {
                boolean isUnique = Boolean.parseBoolean(keys.getString(propertyKeyLabel));
                PropertyKey key = management.getPropertyKey(propertyKeyLabel);
                TitanManagement.IndexBuilder indexBuilder = management.buildIndex(indexLabel, Vertex.class).addKey(key);
                if (isUnique) {
                    indexBuilder.unique();
                }
                indexBuilder.buildCompositeIndex();
            }
        }
    }
}
|
0
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/factory/TitanPreviousPropertyStep.java
|
/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
*/
package ai.grakn.factory;
import com.thinkaurelius.titan.core.TitanTransaction;
import com.thinkaurelius.titan.core.TitanVertex;
import com.thinkaurelius.titan.graphdb.tinkerpop.optimize.TitanTraversalUtil;
import org.apache.tinkerpop.gremlin.process.traversal.Pop;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.step.Scoping;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.FlatMapStep;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Objects;
import java.util.Set;
import static java.util.Collections.emptyIterator;
/**
* Optimise a particular traversal in Titan:
* <p>
* <code>
* g.V().outE().values(c).as(b).V().filter(__.properties(a).where(P.eq(b)));
* </code>
* <p>
* This step can be used in place of {@code V().filter(..)} since we are referring to a previously visited property in
* the traversal.
*
* @author Felix Chapman
*/
class TitanPreviousPropertyStep<S> extends FlatMapStep<S, TitanVertex> implements Scoping {
    // Steps are Serializable via the TinkerPop hierarchy; keep a stable serial id.
    private static final long serialVersionUID = -8906462828437711078L;
    // The property key being looked up (`a` in the class javadoc example).
    private final String propertyKey;
    // The step label whose previously-traversed value supplies the lookup value (`b`).
    private final String stepLabel;
    /**
     * @param traversal the traversal that contains this step
     * @param propertyKey the property key that we are looking up
     * @param stepLabel
     *      the step label that refers to a previously visited value in the traversal.
     *      e.g. in {@code g.V().as(b)}, {@code b} is a step label.
     */
    TitanPreviousPropertyStep(Traversal.Admin traversal, String propertyKey, String stepLabel) {
        super(traversal);
        this.propertyKey = Objects.requireNonNull(propertyKey);
        this.stepLabel = Objects.requireNonNull(stepLabel);
    }
    @Override
    protected Iterator<TitanVertex> flatMap(Traverser.Admin<S> traverser) {
        TitanTransaction tx = TitanTraversalUtil.getTx(this.traversal);
        // Retrieve property value to look-up, that is identified in the traversal by the `stepLabel`
        Object value = getNullableScopeValue(Pop.first, stepLabel, traverser);
        // No labelled value on this path means no vertices can match.
        return value != null ? verticesWithProperty(tx, value) : emptyIterator();
    }
    /**
     * Look up vertices in Titan which have a property {@link TitanPreviousPropertyStep#propertyKey} with the given
     * value.
     * @param tx the Titan transaction to read from
     * @param value the value that the property should have
     */
    private Iterator<TitanVertex> verticesWithProperty(TitanTransaction tx, Object value) {
        // This query is what allows Titan to use its property index instead of a full scan.
        return tx.query().has(propertyKey, value).vertices().iterator();
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;
        TitanPreviousPropertyStep<?> that = (TitanPreviousPropertyStep<?>) o;
        return propertyKey.equals(that.propertyKey) && stepLabel.equals(that.stepLabel);
    }
    @Override
    public int hashCode() {
        // Combines the superclass hash with both fields, consistent with equals above.
        int result = super.hashCode();
        result = 31 * result + propertyKey.hashCode();
        result = 31 * result + stepLabel.hashCode();
        return result;
    }
    @Override
    public String toString() {
        return StringFactory.stepString(this, propertyKey, stepLabel);
    }
    @Override
    public Set<String> getScopeKeys() {
        // The only scoped variable this step reads is the referenced step label.
        return Collections.singleton(stepLabel);
    }
    @Override
    public Set<TraverserRequirement> getRequirements() {
        // This step requires being able to access previously visited properties in the traversal,
        // so it needs `LABELED_PATH`.
        return EnumSet.of(TraverserRequirement.LABELED_PATH);
    }
}
|
0
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/factory/TitanPreviousPropertyStepStrategy.java
|
/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*
*/
package ai.grakn.factory;
import org.apache.tinkerpop.gremlin.process.traversal.Compare;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Step;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategy.ProviderOptimizationStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.TraversalFilterStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.WherePredicateStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.PropertiesStep;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.AbstractTraversalStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import java.util.List;
import java.util.Optional;
/**
* Optimisation applied to use Titan indices in the following additional case:
* <p>
* <code>
* g.V().outE().values(c).as(b).V().filter(__.properties(a).where(P.eq(b)));
* </code>
* <p>
* In this instance, the vertex can be looked up directly in Titan, joining the {@code V().filter(..)}
* steps together.
*
* @author Felix Chapman
*/
public class TitanPreviousPropertyStepStrategy
        extends AbstractTraversalStrategy<ProviderOptimizationStrategy> implements ProviderOptimizationStrategy {
    // Strategies are Serializable via the TinkerPop hierarchy; keep a stable serial id.
    private static final long serialVersionUID = 6888929702831948298L;
    @Override
    public void apply(Traversal.Admin<?, ?> traversal) {
        // Retrieve all graph (`V()`) steps - this is the step the strategy should replace
        List<GraphStep> graphSteps = TraversalHelper.getStepsOfAssignableClass(GraphStep.class, traversal);
        for (GraphStep graphStep : graphSteps) {
            // For each graph step, confirm it follows this pattern:
            // `V().filter(__.properties(a).where(P.eq(b)))`
            // Each structural check below bails out (continue) on the first mismatch,
            // leaving non-matching traversals untouched.
            if (!(graphStep.getNextStep() instanceof TraversalFilterStep)) continue;
            TraversalFilterStep<Vertex> filterStep = (TraversalFilterStep<Vertex>) graphStep.getNextStep();
            // Retrieve the filter steps e.g. `__.properties(a).where(P.eq(b))`
            List<Step> steps = stepsFromFilterStep(filterStep);
            if (steps.size() < 2) continue;
            Step propertiesStep = steps.get(0); // This is `properties(a)`
            Step whereStep = steps.get(1); // This is `filter(__.where(P.eq(b)))`
            // Get the property key `a`
            if (!(propertiesStep instanceof PropertiesStep)) continue;
            Optional<String> propertyKey = propertyFromPropertiesStep((PropertiesStep<Vertex>) propertiesStep);
            if (!propertyKey.isPresent()) continue;
            // Get the step label `b`
            if (!(whereStep instanceof WherePredicateStep)) continue;
            Optional<String> label = labelFromWhereEqPredicate((WherePredicateStep<Vertex>) whereStep);
            if (!label.isPresent()) continue;
            // Pattern confirmed: fuse the two steps into one index-backed lookup step.
            executeStrategy(traversal, graphStep, filterStep, propertyKey.get(), label.get());
        }
    }
    private List<Step> stepsFromFilterStep(TraversalFilterStep<Vertex> filterStep) {
        // TraversalFilterStep always has exactly one child, so this is safe
        return filterStep.getLocalChildren().get(0).getSteps();
    }
    private Optional<String> propertyFromPropertiesStep(PropertiesStep<Vertex> propertiesStep) {
        String[] propertyKeys = propertiesStep.getPropertyKeys();
        // The optimisation only applies when exactly one property key is inspected.
        if (propertyKeys.length != 1) return Optional.empty();
        return Optional.of(propertyKeys[0]);
    }
    private Optional<String> labelFromWhereEqPredicate(WherePredicateStep<Vertex> whereStep) {
        Optional<P<?>> optionalPredicate = whereStep.getPredicate();
        return optionalPredicate.flatMap(predicate -> {
            // Only plain equality predicates can be turned into an index lookup.
            if (!predicate.getBiPredicate().equals(Compare.eq)) return Optional.empty();
            // In a `where(P.eq(b))` the predicate value is the referenced step label `b`.
            return Optional.of((String) predicate.getValue());
        });
    }
    /**
     * Replace the {@code graphStep} and {@code filterStep} with a new {@link TitanPreviousPropertyStep} in the given
     * {@code traversal}.
     */
    private void executeStrategy(
            Traversal.Admin<?, ?> traversal, GraphStep<?, ?> graphStep, TraversalFilterStep<Vertex> filterStep,
            String propertyKey, String label) {
        TitanPreviousPropertyStep newStep = new TitanPreviousPropertyStep(traversal, propertyKey, label);
        traversal.removeStep(filterStep);
        TraversalHelper.replaceStep(graphStep, newStep, traversal);
    }
}
|
0
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/graph
|
java-sources/ai/grakn/titan-factory/0.16.0/ai/grakn/graph/internal/GraknTitanGraph.java
|
/*
* Grakn - A Distributed Semantic Database
* Copyright (C) 2016 Grakn Labs Limited
*
* Grakn is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Grakn is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Grakn. If not, see <http://www.gnu.org/licenses/gpl.txt>.
*/
package ai.grakn.graph.internal;
import ai.grakn.GraknTxType;
import ai.grakn.concept.Concept;
import ai.grakn.exception.GraknBackendException;
import ai.grakn.exception.TemporaryWriteException;
import ai.grakn.util.Schema;
import com.thinkaurelius.titan.core.TitanException;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanVertex;
import com.thinkaurelius.titan.core.util.TitanCleanup;
import com.thinkaurelius.titan.diskstorage.locking.PermanentLockingException;
import com.thinkaurelius.titan.diskstorage.locking.TemporaryLockingException;
import com.thinkaurelius.titan.graphdb.database.StandardTitanGraph;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import java.util.Properties;
import java.util.function.Supplier;
/**
* <p>
* A Grakn Graph using {@link TitanGraph} as a vendor backend.
* </p>
*
* <p>
* Wraps up a {@link TitanGraph} as a method of storing the Grakn Graph object Model.
* With this vendor some issues to be aware of:
* 1. Whenever a transaction is closed if none remain open then the connection to the graph is closed permanently.
* 2. Clearing the graph explicitly closes the connection as well.
* </p>
*
* @author fppt
*/
public class GraknTitanGraph extends AbstractGraknGraph<TitanGraph> {
    public GraknTitanGraph(TitanGraph graph, String name, String engineUrl, Properties properties){
        super(graph, name, engineUrl, properties);
    }
    /**
     * Uses {@link TitanVertex#isModified()}
     *
     * @param concept A concept in the graph
     * @return true if the concept has been modified
     */
    @Override
    public boolean isConceptModified(Concept concept) {
        //TODO: Clean this crap up
        if(concept instanceof ConceptImpl) {
            // Newly created vertices must also count as "modified", hence both flags.
            TitanVertex vertex = (TitanVertex) ((ConceptImpl) concept).vertex().element();
            return vertex.isModified() || vertex.isNew();
        }
        // NOTE(review): concepts not backed by ConceptImpl are conservatively treated
        // as modified — confirm this is the intended behaviour.
        return true;
    }
    /** Opens the Grakn transaction and ensures the underlying Titan transaction is open too. */
    @Override
    public void openTransaction(GraknTxType txType){
        super.openTransaction(txType);
        if(getTinkerPopGraph().isOpen() && !getTinkerPopGraph().tx().isOpen()) getTinkerPopGraph().tx().open();
    }
    @Override
    public boolean isSessionClosed() {
        // For Titan, closing the last transaction closes the whole graph connection,
        // so graph closure is the session-closed signal.
        return getTinkerPopGraph().isClosed();
    }
    @Override
    public int numOpenTx() {
        return ((StandardTitanGraph) getTinkerPopGraph()).getOpenTxs();
    }
    @Override
    protected void clearGraph() {
        // TitanCleanup wipes the backing keyspace; this also closes the connection.
        TitanCleanup.clear(getTinkerPopGraph());
    }
    /** Commits via {@link #executeLockingMethod} so locking failures become retryable exceptions. */
    @Override
    public void commitTransactionInternal(){
        executeLockingMethod(() -> {
            super.commitTransactionInternal();
            return null;
        });
    }
    /** Adds a vertex via {@link #executeLockingMethod} so locking failures become retryable exceptions. */
    @Override
    VertexElement addVertex(Schema.BaseType baseType){
        return executeLockingMethod(() -> super.addVertex(baseType));
    }
    /**
     * Executes a method which has the potential to throw a {@link TemporaryLockingException} or a {@link PermanentLockingException}.
     * If the exception is thrown it is wrapped in a {@link GraknBackendException} so that the transaction can be retried.
     *
     * @param method The locking method to execute
     * @return the value produced by {@code method}
     */
    private <X> X executeLockingMethod(Supplier<X> method){
        try {
            return method.get();
        } catch (TitanException e){
            // Locking conflicts are transient: surface them as a retryable write exception.
            if(e.isCausedBy(TemporaryLockingException.class) || e.isCausedBy(PermanentLockingException.class)){
                throw TemporaryWriteException.temporaryLock(e);
            } else {
                throw GraknBackendException.unknown(e);
            }
        }
    }
    /** Extends the base validity check: a vertex removed in this transaction is invalid. */
    @Override
    public void validVertex(Vertex vertex) {
        super.validVertex(vertex);
        if(((TitanVertex) vertex).isRemoved()){
            throw new IllegalStateException("The vertex [" + vertex + "] has been removed and is no longer valid");
        }
    }
}
|
0
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services/s3/GranicaConfig.java
|
package ai.granica.awssdk.services.s3;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.time.LocalDateTime;
import software.amazon.awssdk.regions.internal.util.EC2MetadataUtils;
public class GranicaConfig {
    // Preference order of endpoint groups for read-style requests: reads may fall
    // back to write endpoints before failing over.
    static String[] ReadOrderEndpoints = { "main_read_endpoints", "main_write_endpoints", "failover_read_endpoints", "failover_write_endpoints" };
    // Preference order for mutating requests: write endpoints only.
    static String[] WriteOrderEndpoints = { "main_write_endpoints", "failover_write_endpoints" };
    // S3 operations get converted to one of the standard HTTP request methods
    // https://docs.aws.amazon.com/apigateway/latest/developerguide/integrating-api-with-aws-services-s3.html
    static List<String> HttpReadMethodTypes = Arrays.asList( "GET", "HEAD" );
    public static String Region = System.getenv("AWS_REGION") == null ? EC2MetadataUtils.getEC2InstanceRegion(): System.getenv("AWS_REGION");
    public static String ZoneId = System.getenv("AWS_ZONE_ID") == null ? EC2MetadataUtils.getAvailabilityZone(): System.getenv("AWS_ZONE_ID");
    public static String CustomDomain = System.getenv("GRANICA_CUSTOM_DOMAIN") != null ? System.getenv("GRANICA_CUSTOM_DOMAIN") : System.getenv("BOLT_CUSTOM_DOMAIN");
    public static String AuthBucket = System.getenv("BOLT_AUTH_BUCKET");
    public static String UserAgentPrefix = System.getenv("USER_AGENT_PREFIX") == null? "projectn/": String.format("%s/",System.getenv("USER_AGENT_PREFIX"));
    static String BoltHostname = String.format("bolt.%s.%s", Region, CustomDomain);
    static String QuicksilverUrl = String.format("https://quicksilver.%s.%s/services/bolt%s", Region, CustomDomain, ZoneId == null ? "": String.format("?az=%s", ZoneId));
    // Next point in time at which the endpoint map should be refreshed from Quicksilver.
    static LocalDateTime RefreshTime = LocalDateTime.now().plusSeconds(120);
    // Endpoint map fetched lazily on first use (see selectBoltEndpoints).
    static Map<String, Object> BoltEndpoints = null;
    static Random rand = new Random();

    /**
     * Fetches the Bolt endpoint map from Quicksilver.
     *
     * @param errIp endpoint that previously failed, reported back via the `err` query
     *              parameter; pass "" to report nothing
     * @return the parsed endpoint map, or null if configuration is missing or the
     *         request/parse failed
     */
    private static Map<String, Object> getBoltEndpoints(String errIp){
        if (QuicksilverUrl == null || Region == null){
            return null;
        }
        String requestUrl = errIp.length() > 0 ?
            String.format("%s&err=%s",QuicksilverUrl, errIp) : QuicksilverUrl;
        return parse(executeGetRequest(requestUrl));
    }

    /**
     * Performs a GET and returns the response body, or null on I/O failure.
     * The HTTP client retries 3 times by default.
     */
    public static String executeGetRequest(String get_url) {
        final HttpGet httpGet = new HttpGet(get_url);
        // FIX: the client used to be leaked; close both client and response.
        try (CloseableHttpClient httpClient = HttpClients.custom().build();
             CloseableHttpResponse response = httpClient.execute(httpGet)) {
            return new BasicResponseHandler().handleResponse(response);
        }
        catch(IOException e){
            e.printStackTrace();
            return null;
        }
    }

    /** Parses a JSON object into a map; returns null for null input or parse failure. */
    private static Map<String, Object> parse(String responseBody){
        // FIX: executeGetRequest may return null; readValue(null, ...) would throw
        // IllegalArgumentException rather than the IOException handled below.
        if (responseBody == null) {
            return null;
        }
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> map = null;
        try {
            map = mapper.readValue(responseBody, HashMap.class);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return map;
    }

    /**
     * Re-fetches the endpoint map and schedules the next refresh 60-180s from now
     * (jittered to avoid thundering-herd refreshes across clients).
     */
    public static void refreshBoltEndpoint(String errIp){
        BoltEndpoints = getBoltEndpoints(errIp);
        RefreshTime = LocalDateTime.now().plusSeconds(60+ rand.nextInt(120));
    }

    /**
     * Picks a random endpoint from the highest-priority non-empty group for the
     * given HTTP method, refreshing the endpoint map first if it is stale or unset.
     *
     * @return the endpoint URI, or null if no endpoint could be resolved
     */
    public static URI selectBoltEndpoints(String httpRequestMethod){
        if (RefreshTime.isBefore(LocalDateTime.now()) || BoltEndpoints == null){
            refreshBoltEndpoint("");
        }
        // FIX: the refresh can legitimately fail and leave the map null;
        // previously this fell through to a NullPointerException below.
        if (BoltEndpoints == null){
            return null;
        }
        String[] preferredOrder = HttpReadMethodTypes.contains(httpRequestMethod) ? ReadOrderEndpoints : WriteOrderEndpoints;
        for (String endPointsKey : preferredOrder){
            if (BoltEndpoints.containsKey(endPointsKey)) {
                List<String> endpoints = (List<String>)BoltEndpoints.get(endPointsKey);
                if (endpoints.size() > 0) {
                    String selectedEndpoint = endpoints.get(rand.nextInt(endpoints.size()));
                    try {
                        return new URI(String.format("https://%s",selectedEndpoint));
                    } catch (URISyntaxException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return null;
    }
}
|
0
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services/s3/GranicaHostnameVerifier.java
|
package ai.granica.awssdk.services.s3;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSession;
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
public class GranicaHostnameVerifier implements HostnameVerifier {

    // FIX: DefaultHostnameVerifier is stateless and documented as thread-safe,
    // so share one instance instead of allocating a new verifier per handshake.
    private static final DefaultHostnameVerifier DELEGATE = new DefaultHostnameVerifier();

    /**
     * Verifies the TLS session against the configured Bolt hostname instead of the
     * hostname that was actually dialed.
     * NOTE(review): this ignores {@code host} entirely — presumably because Bolt
     * endpoints are addressed by IP/alternate names while the certificate is issued
     * for the Bolt hostname; confirm that is the intent.
     */
    @Override
    public boolean verify(String host, SSLSession sslSession) {
        return DELEGATE.verify(GranicaConfig.BoltHostname, sslSession);
    }
}
|
0
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services/s3/GranicaRetryCondition.java
|
package ai.granica.awssdk.services.s3;
import software.amazon.awssdk.core.retry.RetryPolicyContext;
import software.amazon.awssdk.core.retry.conditions.RetryCondition;
import software.amazon.awssdk.core.retry.conditions.SdkRetryCondition;
public interface GranicaRetryCondition extends RetryCondition {

    /** Plain delegation to the inherited default; kept explicit (generated stub). */
    @Override
    default void requestSucceeded(RetryPolicyContext context) {
        RetryCondition.super.requestSucceeded(context);
    }

    /** Plain delegation to the inherited default; kept explicit (generated stub). */
    @Override
    default void requestWillNotBeRetried(RetryPolicyContext context) {
        RetryCondition.super.requestWillNotBeRetried(context);
    }

    /**
     * NOTE(review): always returning false disables retries for any implementor
     * inheriting this default. The original carried a TODO here — confirm this is
     * intended behavior rather than an unfinished stub.
     */
    @Override
    default boolean shouldRetry(RetryPolicyContext context) {
        return false;
    }

    /**
     * NOTE(review): this is an instance method that ignores {@code this} and returns
     * the SDK default retry condition; it looks like it was meant to be static — confirm.
     */
    public default RetryCondition create(){
        return SdkRetryCondition.DEFAULT;
    }
}
|
0
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services/s3/GranicaS3Client.java
|
package ai.granica.awssdk.services.s3;
import java.security.NoSuchAlgorithmException;
import java.util.MissingFormatArgumentException;
import javax.net.ssl.SSLContext;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
import software.amazon.awssdk.core.client.config.SdkAdvancedClientOption;
import software.amazon.awssdk.http.SdkHttpClient;
import software.amazon.awssdk.http.apache.ApacheHttpClient;
import software.amazon.awssdk.http.apache.internal.conn.SdkTlsSocketFactory;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.S3ClientBuilder;
import software.amazon.awssdk.services.s3.S3Configuration;
/**
* Service Client for accessing S3 via Bolt.
* It provides the same builder as S3Client to configure and create a service client. Its endpoint always resolves to
* Bolt Service URL as specified via the 'BOLT_URL' environment variable.
*/
public interface GranicaS3Client extends S3Client {

    /**
     * Creates a client with default settings. Bolt endpoints are resolved via
     * Quicksilver at signing time, not from the environment.
     */
    static S3Client create(){
        return builder().build();
    }

    /**
     * Builds an S3 client wired for Bolt: custom hostname verification, path-style
     * addressing, the Granica request signer, and passthrough reads disabled.
     *
     * @throws MissingFormatArgumentException if no custom domain is configured
     * @throws IllegalStateException if the default SSL context is unavailable
     */
    static S3ClientBuilder builder(){
        if (GranicaConfig.CustomDomain == null){
            throw new MissingFormatArgumentException("GRANICA_CUSTOM_DOMAIN is not set. \n"+
                "Set the environment variable to GRANICA_CUSTOM_DOMAIN=my-bolt.my-domain.com. This can be obtained from `granica ls`");
        }
        SSLContext sslcontext;
        try {
            sslcontext = SSLContext.getDefault();
        } catch (NoSuchAlgorithmException e1) {
            // FIX: previously this was logged and execution continued with a null
            // SSLContext, which would fail later inside the socket factory with an
            // unrelated NPE. Fail fast instead and preserve the cause.
            throw new IllegalStateException("SSL context is not initialized", e1);
        }
        ConnectionSocketFactory socketFactory = new SdkTlsSocketFactory(sslcontext, new GranicaHostnameVerifier());
        SdkHttpClient client = ApacheHttpClient.builder().socketFactory(socketFactory).build();
        return S3Client.builder()
            .httpClient(client)
            .serviceConfiguration(S3Configuration.builder()
                // Bolt routes by path, not virtual-hosted buckets.
                .pathStyleAccessEnabled(true)
                .build())
            .overrideConfiguration(ClientOverrideConfiguration.builder()
                .putAdvancedOption(SdkAdvancedClientOption.SIGNER, GranicaSigner.create())
                .putHeader("X-Bolt-Passthrough-Read", "disable")
                .build());
    }
}
|
0
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services
|
java-sources/ai/granica/granica-java-sdk/1.0.5/ai/granica/awssdk/services/s3/GranicaSigner.java
|
package ai.granica.awssdk.services.s3;
import java.net.URI;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.RandomStringUtils;
import software.amazon.awssdk.auth.signer.Aws4Signer;
import software.amazon.awssdk.core.interceptor.ExecutionAttributes;
import software.amazon.awssdk.core.signer.Signer;
import software.amazon.awssdk.http.SdkHttpFullRequest;
import software.amazon.awssdk.http.SdkHttpFullRequest.Builder;
import software.amazon.awssdk.http.SdkHttpMethod;
// import com.amazonaws.util.Base64;
/**
* BoltSigner is the AWS4 protocol signer for Bolt. It uses the credentials from the incoming S3 request and
* canonically signs the request as a STS GetCallerIdentity API call.
*/
public class GranicaSigner implements Signer {

    // Underlying SigV4 signer used to sign the synthetic auth request.
    private final Aws4Signer aws4Signer;

    private GranicaSigner() {
        aws4Signer = Aws4Signer.create();
    }

    /** Static factory; the signer itself is stateless apart from the wrapped Aws4Signer. */
    public static GranicaSigner create() {
        return new GranicaSigner();
    }

    /**
     * Method that takes in a Request and returns the signed version of the request that has been canonically signed as
     * a STS GetCallerIdentity API call.
     *
     * The flow: re-route the request to a Bolt endpoint, SigV4-sign a synthetic
     * HEAD-object request against real S3 with the caller's credentials, then copy
     * the resulting auth headers onto the re-routed request so Bolt can verify them.
     *
     * @param request The request to sign
     * @param executionAttributes attributes (e.g credentials) required for signing the request
     * @return signed input request
     */
    public SdkHttpFullRequest sign(SdkHttpFullRequest request, ExecutionAttributes executionAttributes) {
        // Endpoint selection depends on whether the HTTP method is a read (GET/HEAD).
        URI boltURI = GranicaConfig.selectBoltEndpoints(request.method().name());
        // First path segment of the original URI is the bucket; fall back to a dummy.
        String sourceBucket = request.getUri().getRawPath().split("/").length > 1?
            request.getUri().getRawPath().split("/")[1] : "n-auth-dummy";
        if (GranicaConfig.AuthBucket != null){
            sourceBucket = GranicaConfig.AuthBucket;
        }
        // Random prefix ties this request to the X-Bolt-Auth-Prefix header below.
        String prefix = getRandomString();
        String headObjectURL = String.format("https://s3.%s.amazonaws.com/%s/%s/auth", GranicaConfig.Region, sourceBucket, prefix);
        // Point the outgoing request at the selected Bolt endpoint.
        Builder requestBuilder = request.toBuilder();
        request = requestBuilder.uri(boltURI).build();
        SdkHttpFullRequest headRequest = SdkHttpFullRequest
            .builder()
            .method(SdkHttpMethod.HEAD)
            .uri(URI.create(headObjectURL))
            //SHA value for empty payload. As head object request is with empty payload
            .appendHeader("X-Amz-Content-Sha256", "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
            .build();
        // Sign the synthetic request with the caller's credentials, then copy the
        // resulting auth headers onto the Bolt-bound request.
        SdkHttpFullRequest signedIamRequest = aws4Signer.sign(headRequest, executionAttributes);
        SdkHttpFullRequest.Builder signedRequestBuilder = request.toBuilder();
        Map<String, List<String>> iamHeaders = signedIamRequest.headers();
        if (iamHeaders.containsKey("X-Amz-Security-Token")) {
            signedRequestBuilder.putHeader("X-Amz-Security-Token", iamHeaders.get("X-Amz-Security-Token"));
        }
        if (iamHeaders.containsKey("X-Amz-Date")) {
            signedRequestBuilder.putHeader("X-Amz-Date", iamHeaders.get("X-Amz-Date"));
        }
        if (iamHeaders.containsKey("Authorization")) {
            signedRequestBuilder.putHeader("Authorization", iamHeaders.get("Authorization"));
        }
        if (iamHeaders.containsKey("x-amz-content-sha256")) {
            signedRequestBuilder.putHeader("x-amz-content-sha256", iamHeaders.get("x-amz-content-sha256"));
        }
        signedRequestBuilder.putHeader("Host", GranicaConfig.BoltHostname);
        signedRequestBuilder.putHeader("X-Bolt-Auth-Prefix", prefix);
        // NOTE(review): request.headers().get("User-Agent") is a List<String>, so the
        // formatted value includes list brackets, e.g. "projectn/[aws-sdk-java...]".
        // Confirm this is the intended User-Agent format.
        signedRequestBuilder.putHeader("User-Agent", String.format("%s%s", GranicaConfig.UserAgentPrefix, request.headers().get("User-Agent")));
        request = signedRequestBuilder.build();
        return request;
    }

    /** @return a 4-character random lowercase string used as the auth prefix. */
    private static String getRandomString(){
        String chars = "qwertyuiopasdfghjklzxcvbnm";
        return RandomStringUtils.random(4, chars);
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/backends/BackendModel.java
|
package deepwater.backends;
/**
 * Marker interface for a backend-specific model handle. Concrete backends attach
 * their own state; callers only pass the handle back into {@code BackendTrain} methods.
 */
public interface BackendModel {
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/backends/BackendParams.java
|
package deepwater.backends;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * A simple string-keyed bag of backend parameters with fluent {@link #set}.
 * NOTE(review): the type parameter {@code T} is unused by this class but kept
 * for backward compatibility with existing declarations.
 */
public class BackendParams<T> {
    // Name -> value store. FIX: was constructed with raw types (new HashMap()).
    Map<String, Object> values = new HashMap<>();
    // Insertion-ordered log of every name passed to set() (duplicates included).
    ArrayList<String> args = new ArrayList<>();

    /**
     * Stores a value under the given name and records the name in {@link #args}.
     *
     * @return this, for call chaining
     */
    public BackendParams<T> set(String name, Object value) {
        args.add(name);
        values.put(name, value);
        return this;
    }

    /** @return the stored value, or null if the name was never set */
    public Object get(String name) {
        return values.get(name);
    }

    /** @return true if a non-null value is stored under the name */
    public boolean has(String name) {
        return values.get(name) != null;
    }

    /** @return the stored value, or {@code deflt} when absent or null */
    public Object get(String key, Object deflt) {
        Object value = values.get(key);
        if (value == null) {
            return deflt;
        }
        return value;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/backends/BackendTrain.java
|
package deepwater.backends;
import deepwater.datasets.ImageDataSet;
import java.io.File;
import java.io.IOException;
/**
 * Contract every Deep Water backend (e.g. mxnet, TensorFlow) must implement:
 * model construction, (de)serialization of graph and parameters, training,
 * prediction and layer extraction.
 */
public interface BackendTrain {

    /** Releases all backend resources held by the model. */
    void delete(BackendModel m);

    /**
     * Constructs a trainable Deep Water model.
     *
     * @param name specifies the model architecture, or is a path to a graph definition file
     */
    BackendModel buildNet(ImageDataSet dataset, RuntimeOptions opts,
        BackendParams backend_params, int num_classes, String name);

    /** Saves the model graph/architecture to the given path. */
    void saveModel(BackendModel m, String model_path);

    /** Deletes a previously saved model file. */
    void deleteSavedModel(String model_path);

    /** Loads model weights/parameters from the given path. */
    void loadParam(BackendModel m, String param_path);

    /** Saves model weights/parameters to the given path. */
    void saveParam(BackendModel m, String param_path);

    /** Deletes a previously saved parameter file. */
    void deleteSavedParam(String param_path);

    /** Loads the mean-image normalization data from the given path. */
    float[] loadMeanImage(BackendModel m, String path);

    /** @return a JSON description of the model */
    String toJson(BackendModel m);

    /**
     * Sets a scalar training hyper-parameter. Known names include:
     * learning_rate, weight_decay, momentum, clip_gradient (as bool).
     */
    void setParameter(BackendModel m, String name, float value);

    /**
     * Runs one training step.
     *
     * @param data  data[mini_batch_size * input_neurons]
     * @param label label[mini_batch_size]
     * @return value is to be ignored (TODO: return void)
     */
    float[] train(BackendModel m, float[] data, float[] label);

    /**
     * @param data data[mini_batch_size * input_neurons]
     * @return float[mini_batch_size * nclasses] with per-class probabilities (regression: nclasses=1)
     */
    float[] predict(BackendModel m, float[] data);

    /** @return a listing of all layer names available for extraction */
    String listAllLayers(BackendModel m);

    /** Extracts the activations of the hidden layer with the given name. */
    float[] extractLayer(BackendModel m, String name, float[] data);

    /** Writes raw bytes to a file (backend-specific serialization helper). */
    void writeBytes(File file, byte[] payload) throws IOException;

    /** Reads raw bytes from a file (backend-specific serialization helper). */
    byte[] readBytes(File file) throws IOException;
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/backends/RuntimeOptions.java
|
package deepwater.backends;
/**
 * Runtime knobs shared by all backends: GPU toggle, device selection and RNG seed.
 */
public class RuntimeOptions {

    private boolean gpuEnabled = true;
    private long randomSeed = System.nanoTime();
    private int[] deviceIds = {0};

    /** @return the device ids to run on; defaults to {0} */
    public int[] getDeviceID() {
        return deviceIds;
    }

    /** Sets the device ids to run on (the caller's array is stored as-is). */
    public void setDeviceID(int... deviceID) {
        this.deviceIds = deviceID;
    }

    /** @return whether computation should run on the GPU; defaults to true */
    public boolean useGPU() {
        return gpuEnabled;
    }

    public void setUseGPU(boolean use_gpu) {
        this.gpuEnabled = use_gpu;
    }

    /** @return the RNG seed; defaults to System.nanoTime() at construction */
    public long getSeed() {
        return randomSeed;
    }

    public void setSeed(long seed) {
        this.randomSeed = seed;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/BatchIterator.java
|
package deepwater.datasets;
import java.io.IOException;
import java.util.List;
/**
 * Iterates a dataset in fixed-size mini-batches across multiple epochs.
 * The image list is (re)loaded from disk at the start of every epoch.
 */
public class BatchIterator {
    // Dataset used to load images at the start of each epoch.
    private final ImageDataSet data;
    private final int totalEpochs;
    private int currentEpoch;
    // All (label, pixels) pairs of the current epoch.
    private List<Pair<Integer, float[]>> imageLabelList;
    // Position in imageLabelList carried over between next() calls;
    // 0 means "start a new epoch on the next call".
    private int savedIterator;
    private String[] images;

    /**
     * @param data   dataset used to decode the image files
     * @param epochs number of passes over the data
     * @param images file paths handed to {@link ImageDataSet#loadImages}
     */
    public BatchIterator(ImageDataSet data, int epochs, String... images) {
        this.data = data;
        this.totalEpochs = epochs;
        this.currentEpoch = 0;
        // NOTE(review): this assignment is dead — immediately overwritten below.
        this.images = new String[]{};
        this.images = images;
    }

    // Advances the epoch counter and reloads the image list from disk.
    private void newEpoch() throws IOException {
        this.currentEpoch++;
        imageLabelList = data.loadImages(images);
    }

    /**
     * Fills {@code b} with the next mini-batch.
     *
     * @return true when a full batch was produced; false when the epoch's data was
     *         exhausted (a trailing partial batch is still written into {@code b})
     *         or all epochs are done
     */
    public boolean next(ImageBatch b) throws IOException {
        if (savedIterator == 0){
            newEpoch();
        }
        if (currentEpoch > totalEpochs){
            return false;
        }
        // clear the batch memory
        b.reset();
        for (int ii = savedIterator; ii < imageLabelList.size() ; ii++) {
            // Slot within the batch for this sample.
            int i = ii % b.size;
            Pair<Integer, float[]> entry = imageLabelList.get(ii);
            float[] image = entry.getSecond();
            Integer label = entry.getFirst();
            System.arraycopy(image, 0, b.images, i * image.length, image.length);
            b.labels[i] = label;
            // The increment makes the full-batch check below fire when this sample
            // landed in the last slot (i becomes b.size exactly then).
            i++;
            savedIterator++;
            if (i == b.size) {
                // exit when the batch is full
                return true;
            }
        }
        // we finished the list of data to add to the batch.
        savedIterator = 0;
        return false;
    }

    /** @return true while more epochs remain to iterate */
    public boolean nextEpochs() {
        return currentEpoch <= totalEpochs;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/CIFAR10ImageDataset.java
|
package deepwater.datasets;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
public class CIFAR10ImageDataset extends ImageDataSet {

    // A CIFAR-10 binary record is 1 label byte followed by a 32x32 image stored
    // channel-major (all red, then all green, then all blue).
    private static final int DIM = 32;
    private static final int CHANNELS = 3;
    private static final int PIXELS = DIM * DIM;
    private static final int RECORD_LENGTH = 1 + PIXELS * CHANNELS;

    public CIFAR10ImageDataset(){
        super(DIM, DIM, CHANNELS, 10);
    }

    /**
     * Loads CIFAR-10 binary files into (label, pixels) pairs. Pixel values are raw
     * 0-255 floats, laid out pixel-major with interleaved channels [r,g,b, r,g,b, ...].
     *
     * Fixes over the previous version: the input stream is closed (it leaked),
     * records are read fully even when InputStream.read returns a partial buffer
     * (a short read used to decode stale bytes from the previous record), and the
     * label byte is masked with {@code & 0xFF} instead of {@code % 0xFF}, which is
     * wrong for byte values >= 128.
     *
     * @throws IOException on read failure or a truncated trailing record
     */
    public List<Pair<Integer,float[]>> loadImages(String... filepath) throws IOException {
        List<Pair<Integer,float[]>> images = new ArrayList<>();
        byte[] record = new byte[RECORD_LENGTH];
        for (String path: filepath) {
            try (InputStream inputStream = new FileInputStream(path)) {
                int read;
                while ((read = readFully(inputStream, record)) == RECORD_LENGTH) {
                    images.add(decodeRecord(record));
                }
                if (read != 0) {
                    throw new IOException("Truncated CIFAR-10 record in " + path);
                }
            }
        }
        return images;
    }

    // Converts one raw record into a (label, interleaved-RGB pixels) pair.
    private static Pair<Integer, float[]> decodeRecord(byte[] record) {
        int label = record[0] & 0xFF;
        float[] result = new float[PIXELS * CHANNELS];
        for (int channel = 0; channel < CHANNELS; channel++) {
            for (int j = 0; j < PIXELS; j++) {
                // channel-major on disk -> pixel-major, channel-interleaved in memory
                result[j * CHANNELS + channel] = record[1 + channel * PIXELS + j] & 0xFF;
            }
        }
        return new Pair<>(label, result);
    }

    // Fills the buffer as far as possible; returns the byte count (0 on clean EOF).
    private static int readFully(InputStream in, byte[] buffer) throws IOException {
        int offset = 0;
        while (offset < buffer.length) {
            int n = in.read(buffer, offset, buffer.length - offset);
            if (n == -1) {
                break;
            }
            offset += n;
        }
        return offset;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/FileUtils.java
|
package deepwater.datasets;
import java.io.File;
public class FileUtils {

    /**
     * Replaces a leading "~" with the user's home directory.
     * NOTE(review): the replacement string is not regex-escaped; a home path
     * containing '\' or '$' (e.g. on Windows) could misbehave — confirm.
     */
    public static String expandPath(String path) {
        return path.replaceFirst("^~", System.getProperty("user.home"));
    }

    /**
     * Searches a handful of likely locations for the file. When run from IDEs the
     * working directory differs, so relative parents and the build dir are tried too.
     *
     * @return the absolute path of the first candidate that exists, or "" if none do
     */
    public static String findFile(String fname) {
        String[] candidates = {
            fname,
            "target/" + fname,
            "../" + fname,
            "../../" + fname,
            "../target/" + fname,
            expandPath(fname),
        };
        for (String candidate : candidates) {
            File f = new File(candidate);
            if (f.exists()) {
                return f.getAbsolutePath();
            }
        }
        return "";
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/ImageBatch.java
|
package deepwater.datasets;
import java.util.Arrays;
public class ImageBatch {
final int size;
final float[] labels;
final float[] images;
public ImageBatch(ImageDataSet data, int size) {
this.size = size;
this.labels = new float[size];
this.images = new float[data.getHeight() * data.getWidth() * data.getChannels() * size];
}
void reset() {
Arrays.fill(labels, 0);
Arrays.fill(images, 0);
}
public float[] getLabels() {
return labels;
}
public float[] getImages() {
return images;
}
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/ImageDataSet.java
|
package deepwater.datasets;
import java.io.IOException;
import java.util.List;
/**
 * Describes an image dataset's geometry (width x height x channels), its number
 * of classes, and optional mean-image data used for input normalization.
 */
public class ImageDataSet {
    int channels;
    float[] meanData; // mean pixel value of the training data
    private int width;
    private int height;
    private int numClasses;

    public ImageDataSet(int width, int height, int channels, int numClasses) {
        this.width = width;
        this.height = height;
        this.channels = channels;
        this.numClasses = numClasses;
    }

    public int getWidth() { return width; }

    public void setWidth(int width) { this.width = width; }

    public int getHeight() { return height; }

    public void setHeight(int height) { this.height = height; }

    public int getChannels() { return channels; }

    public void setChannels(int channels) { this.channels = channels; }

    public float[] getMeanData() { return meanData; }

    /**
     * Stores the mean image, validating that its length matches the dataset
     * geometry (channels * width * height).
     *
     * @throws IllegalArgumentException when the length does not match
     */
    public void setMeanData(float[] meanData) {
        int dim = channels * width * height;
        if (meanData.length != dim) {
            throw new IllegalArgumentException("Invalid mean image data format. Expected length: " + dim + ", but has length: " + meanData.length);
        }
        this.meanData = meanData;
    }

    public int getNumClasses() { return numClasses; }

    /** Base implementation returns null; subclasses override to decode real files. */
    public List<Pair<Integer, float[]>> loadImages(String... filename) throws IOException {
        return null;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/MNISTImageDataset.java
|
package deepwater.datasets;
// Inspired from http://stackoverflow.com/questions/8286668/how-to-read-mnist-data-in-c
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.zip.GZIPInputStream;
/**
 * Loads the MNIST handwritten-digit dataset from its gzipped IDX binary files
 * (format described at http://yann.lecun.com/exdb/mnist/). Pixels are scaled
 * to [0.0, 1.0].
 */
public class MNISTImageDataset extends ImageDataSet {

    // Download URLs for the four MNIST files.
    public static final Map<String, String> Resources = fillResources();

    private static Map<String, String> fillResources() {
        Map<String, String> resources = new HashMap<>();
        resources.put("train_images", "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz");
        // NOTE(review): this key looks like a typo for "train_labels" (the URL is the
        // training-labels file) — confirm before changing, callers may use "in_images".
        resources.put("in_images","http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz");
        resources.put("test_images", "http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz");
        resources.put("test_labels","http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz");
        return Collections.unmodifiableMap(resources);
    }

    private String labelFileName;
    private String imageFileName;

    // the following constants are defined as per the values described at http://yann.lecun.com/exdb/mnist/
    private static final int MAGIC_OFFSET = 0;
    private static final int OFFSET_SIZE = 4; //in bytes
    private static final int LABEL_MAGIC = 2049;
    private static final int IMAGE_MAGIC = 2051;
    private static final int NUMBER_ITEMS_OFFSET = 4;
    private static final int ITEMS_SIZE = 4;
    private static final int NUMBER_OF_ROWS_OFFSET = 8;
    private static final int ROWS_SIZE = 4;
    public static final int ROWS = 28;
    private static final int NUMBER_OF_COLUMNS_OFFSET = 12;
    private static final int COLUMNS_SIZE = 4;
    public static final int COLUMNS = 28;
    private static final int IMAGE_OFFSET = 16;
    private static final int IMAGE_SIZE = ROWS * COLUMNS;

    public MNISTImageDataset() {
        super(28, 28, 1, 10);
    }

    /** Loads from the instance's configured file names (both null unless set elsewhere). */
    public List<Pair<Integer,float[]>> loadImages() throws IOException {
        return loadImages(imageFileName, labelFileName);
    }

    /**
     * Decodes gzipped IDX image/label file pairs into (label, pixels) pairs.
     *
     * @param filenames alternating [imageFile, labelFile] paths
     * @throws IOException on bad magic numbers, mismatched counts or wrong geometry
     */
    public List<Pair<Integer,float[]>> loadImages(String... filenames) throws IOException {
        assert (filenames.length % 2 == 0): "expected image and label";
        List<Pair<Integer,float[]>> images = new ArrayList();
        ByteArrayOutputStream labelBuffer = new ByteArrayOutputStream();
        ByteArrayOutputStream imageBuffer = new ByteArrayOutputStream();
        // NOTE(review): these streams are never closed (and only the first file pair
        // is read despite the varargs signature) — worth confirming/fixing upstream.
        InputStream labelInputStream = new GZIPInputStream(new FileInputStream(filenames[1]));//this.getClass().getResourceAsStream(labelFileName);
        InputStream imageInputStream = new GZIPInputStream(new FileInputStream(filenames[0])); //this.getClass().getResourceAsStream(imageFileName);
        int read;
        byte[] buffer = new byte[16384];
        // Slurp both gzipped files fully into memory before parsing.
        while((read = labelInputStream.read(buffer, 0, buffer.length)) != -1) {
            labelBuffer.write(buffer, 0, read);
        }
        labelBuffer.flush();
        while((read = imageInputStream.read(buffer, 0, buffer.length)) != -1) {
            imageBuffer.write(buffer, 0, read);
        }
        imageBuffer.flush();
        byte[] labelBytes = labelBuffer.toByteArray();
        byte[] imageBytes = imageBuffer.toByteArray();
        // Validate the IDX magic numbers of both files.
        byte[] labelMagic = Arrays.copyOfRange(labelBytes, 0, OFFSET_SIZE);
        byte[] imageMagic = Arrays.copyOfRange(imageBytes, 0, OFFSET_SIZE);
        int magic = ByteBuffer.wrap(labelMagic).getInt();
        if(magic != LABEL_MAGIC) {
            throw new IOException("Bad magic number in label file got "+magic+"instead of "+LABEL_MAGIC);
        }
        if(ByteBuffer.wrap(imageMagic).getInt() != IMAGE_MAGIC) {
            throw new IOException("Bad magic number in image file!");
        }
        int numberOfLabels = ByteBuffer.wrap(Arrays.copyOfRange(labelBytes, NUMBER_ITEMS_OFFSET, NUMBER_ITEMS_OFFSET + ITEMS_SIZE)).getInt();
        int numberOfImages = ByteBuffer.wrap(Arrays.copyOfRange(imageBytes, NUMBER_ITEMS_OFFSET, NUMBER_ITEMS_OFFSET + ITEMS_SIZE)).getInt();
        if(numberOfImages != numberOfLabels) {
            throw new IOException("The number of labels and images do not match!");
        }
        int numRows = ByteBuffer.wrap(Arrays.copyOfRange(imageBytes, NUMBER_OF_ROWS_OFFSET, NUMBER_OF_ROWS_OFFSET + ROWS_SIZE)).getInt();
        int numCols = ByteBuffer.wrap(Arrays.copyOfRange(imageBytes, NUMBER_OF_COLUMNS_OFFSET, NUMBER_OF_COLUMNS_OFFSET + COLUMNS_SIZE)).getInt();
        // NOTE(review): this check uses && — a file with wrong rows but right columns
        // (or vice versa) passes; likely meant ||. Confirm before changing.
        if(numRows != ROWS && numCols != COLUMNS) {
            throw new IOException("Bad image. Rows and columns do not equal " + ROWS + "x" + COLUMNS);
        }
        for(int i = 0; i < numberOfLabels; i++) {
            // Label i lives right after the 8-byte label-file header.
            int label = labelBytes[OFFSET_SIZE + ITEMS_SIZE + i];
            byte[] imageData = Arrays.copyOfRange(imageBytes, (i * IMAGE_SIZE) + IMAGE_OFFSET, (i * IMAGE_SIZE) + IMAGE_OFFSET + IMAGE_SIZE);
            float[] imageDataFloat = new float[ROWS * COLUMNS];
            int p = 0;
            for (int j = 0; j < imageData.length; j++) {
                float result = imageData[j] & 0xFF;
                // Convert from [0,255] to [0.0, 1.0]
                result *= 1.0/255.0;
                imageDataFloat[p] = result;
                p++;
            }
            assert p == ROWS * COLUMNS: "Expected: "+ROWS*COLUMNS+" GOT: "+p;
            images.add(new Pair(label, imageDataFloat));
        }
        return images;
    }
}
|
0
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater
|
java-sources/ai/h2o/deepwater-backend-api/1.0.4/deepwater/datasets/Pair.java
|
package deepwater.datasets;
import java.util.AbstractMap.SimpleImmutableEntry;
/**
 * Immutable 2-tuple built on {@link SimpleImmutableEntry}; {@code getFirst}/
 * {@code getSecond} alias the entry's key and value.
 */
public class Pair<F, S> extends SimpleImmutableEntry<F, S> {

    public Pair(F first, S second) {
        super(first, second);
    }

    public F getFirst() {
        return getKey();
    }

    public S getSecond() {
        return getValue();
    }

    @Override
    public String toString() {
        return "[" + getKey() + "," + getValue() + "]";
    }
}
|
0
|
java-sources/ai/h2o/featurestore-azure-gen2-spark-dependencies/1.1.2/ai/h2o/featurestore
|
java-sources/ai/h2o/featurestore-azure-gen2-spark-dependencies/1.1.2/ai/h2o/featurestore/abfs/FeatureStoreSASTokenProvider.java
|
package ai.h2o.featurestore.abfs;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider;
import org.apache.hadoop.security.AccessControlException;
/**
* A class that provides SAS tokens for Feature Store operations on Azure Blob Storage.
*/
/**
 * A class that provides SAS tokens for Feature Store operations on Azure Blob Storage.
 * Tokens are looked up from Hadoop configuration, per-path first (walking up the
 * path's parents), falling back to a root token and finally a static token.
 */
public class FeatureStoreSASTokenProvider implements SASTokenProvider {

    private Configuration configuration;

    @Override
    public void initialize(Configuration configuration, String accountName) throws IOException {
        this.configuration = configuration;
    }

    /**
     * Resolves the SAS token for the given account/filesystem/path. The
     * {@code operation} argument is not used in the lookup.
     */
    @Override
    public String getSASToken(String account, String fileSystem, String path, String operation)
        throws IOException, AccessControlException {
        String normalized = getCorrectedPath(path);
        String token;
        if (normalized.isEmpty()) {
            token = configuration.get(
                String.format("spark.hadoop.ai.h2o.featurestore.storage.%s.%s.rootSasToken", account, fileSystem));
        } else {
            token = findPathToken(account, fileSystem, FileSystems.getDefault().getPath(normalized));
        }
        if (token == null || token.isEmpty()) {
            // Last resort: a single static token shared across all paths.
            return configuration.get("spark.hadoop.ai.h2o.featurestore.storage.staticToken");
        }
        return token;
    }

    // Collapses repeated slashes and strips a single leading slash.
    private String getCorrectedPath(String path) {
        String squashed = path.replaceAll("/+", "/");
        return squashed.startsWith("/") ? squashed.substring(1) : squashed;
    }

    // Walks from the path up through its parents looking for a per-path token;
    // falls back to the filesystem-level pathSasToken when none is configured.
    private String findPathToken(String account, String fileSystem, Path path) {
        for (Path current = path; current != null; current = current.getParent()) {
            String token = configuration.get(
                String.format("spark.hadoop.ai.h2o.featurestore.storage.%s.%s.%s.pathSasToken", account, fileSystem, current));
            if (token != null) {
                return token;
            }
        }
        return configuration.get(
            String.format("spark.hadoop.ai.h2o.featurestore.storage.%s.%s.pathSasToken", account, fileSystem));
    }
}
|
0
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore/Token.java
|
package ai.h2o.featurestore;
import java.time.Instant;
/**
 * Immutable SAS token paired with an optional expiration instant.
 */
public class Token {

    private final String token;
    private final Instant expirationTime;

    public Token(String token, Instant expirationTime) {
        this.token = token;
        this.expirationTime = expirationTime;
    }

    /**
     * Builds a Token from raw strings.
     *
     * @param token          the token value; null/empty yields null (no token)
     * @param expirationTime ISO-8601 instant, or null/empty for "never expires"
     * @return the token, or null when no token value was provided
     */
    public static Token createToken(String token, String expirationTime) {
        if (token == null || token.isEmpty()) {
            return null;
        }
        Instant expiry = (expirationTime == null || expirationTime.isEmpty())
            ? null
            : Instant.parse(expirationTime);
        return new Token(token, expiry);
    }

    public String getToken() {
        return token;
    }

    /** @return the expiration instant, or null when the token never expires */
    public Instant getExpirationTime() {
        return expirationTime;
    }
}
|
0
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore/TokenProvider.java
|
package ai.h2o.featurestore;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Resolves SAS tokens from Hadoop configuration under a configurable key prefix.
 * Lookup order: per-path token (file key, then directory key, walking up the
 * parents), then the prefix-level token, then a static fallback token.
 */
public class TokenProvider {

    private final static Logger log = LoggerFactory.getLogger(TokenProvider.class);
    private final Configuration configuration;
    private final String prefix;

    public TokenProvider(Configuration configuration, String prefix) {
        this.configuration = configuration;
        this.prefix = prefix;
    }

    /** Resolves the token for the given path (root token for the empty path). */
    public Token getToken(String path) {
        String normalized = getCorrectedPath(path);
        if (normalized.isEmpty()) {
            String rootToken = configuration.get(String.format("%s.rootSasToken", prefix));
            return Token.createToken(rootToken, null);
        }
        Token token = findPathToken(FileSystems.getDefault().getPath(normalized));
        if (token != null) {
            return token;
        }
        return Token.createToken(configuration.get("spark.hadoop.ai.h2o.featurestore.storage.staticToken"), null);
    }

    // Collapses repeated slashes and strips a single leading slash.
    private String getCorrectedPath(String path) {
        String squashed = path.replaceAll("/+", "/");
        return squashed.startsWith("/") ? squashed.substring(1) : squashed;
    }

    // Walks from the path up through its parents; at each level tries the file key
    // first, then the directory (trailing-slash) key. Falls back to the prefix-level
    // pathSasToken when nothing matches.
    private Token findPathToken(Path path) {
        Path p = path;
        while (p != null) {
            log.info("Retrieving token for: {}", p);
            final Path current = p;
            Optional<Token> match = getFilePath(current).or(() -> getDirectoryPath(current));
            if (match.isPresent()) {
                return match.get();
            }
            p = p.getParent();
        }
        log.info("Retrieving token for: {}", (Object) null);
        String sasToken = configuration.get(prefix + ".pathSasToken");
        String expirationTime = configuration.get(prefix + ".expirationTime");
        return Token.createToken(sasToken, expirationTime);
    }

    private Optional<Token> getFilePath(Path path) {
        return getFilePath(path.toString());
    }

    private Optional<Token> getFilePath(String path) {
        String sasToken =
            configuration.get(
                String.format("%s.%s.pathSasToken", prefix, path));
        String expirationTime =
            configuration.get(
                String.format("%s.%s.expirationTime", prefix, path));
        return Optional.ofNullable(Token.createToken(sasToken, expirationTime));
    }

    private Optional<Token> getDirectoryPath(Path path) {
        return getFilePath(path.toString() + "/");
    }
}
|
0
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore/abfs/FeatureStoreSASTokenProvider.java
|
package ai.h2o.featurestore.abfs;
import ai.h2o.featurestore.Token;
import ai.h2o.featurestore.TokenProvider;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider;
import org.apache.hadoop.security.AccessControlException;
/**
 * Supplies Feature Store SAS tokens to the Hadoop ABFS driver for Azure Blob
 * Storage, delegating key lookup to {@link TokenProvider}.
 */
public class FeatureStoreSASTokenProvider implements SASTokenProvider {

    private Configuration configuration;

    @Override
    public void initialize(Configuration configuration, String accountName) {
        this.configuration = configuration;
    }

    /**
     * @return the SAS token configured for the given account/filesystem/path
     *     combination, or {@code null} when none is configured
     */
    @Override
    public String getSASToken(String account, String fileSystem, String path, String operation) {
        String prefix =
            String.format("spark.hadoop.ai.h2o.featurestore.storage.%s.%s", account, fileSystem);
        Token token = new TokenProvider(configuration, prefix).getToken(path);
        return token == null ? null : token.getToken();
    }
}
|
0
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore
|
java-sources/ai/h2o/featurestore-spark-dependencies/1.2.0/ai/h2o/featurestore/gs/FeatureStoreTokenProvider.java
|
package ai.h2o.featurestore.gs;
import ai.h2o.featurestore.Token;
import ai.h2o.featurestore.TokenProvider;
import com.google.cloud.hadoop.util.AccessBoundary;
import com.google.cloud.hadoop.util.AccessTokenProvider;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
/**
 * Supplies down-scoped Feature Store access tokens to the GCS Hadoop connector,
 * delegating key lookup to {@link TokenProvider}.
 */
public class FeatureStoreTokenProvider implements AccessTokenProvider {

    private Configuration configuration;

    @Override
    public AccessToken getAccessToken() {
        // Only down-scoped (per-object) tokens are supported.
        return null;
    }

    /**
     * Resolves a token for the first requested access boundary.
     *
     * @param accessBoundaries bucket/object pairs the token must cover; only the
     *     first entry is consulted (assumes the connector passes exactly one —
     *     TODO confirm)
     * @return the access token, or {@code null} when no token is configured
     */
    @Override
    public AccessToken getAccessToken(List<AccessBoundary> accessBoundaries) {
        AccessBoundary accessBoundary = accessBoundaries.get(0);
        TokenProvider tokenProvider = new TokenProvider(configuration,
            String.format("spark.hadoop.ai.h2o.featurestore.storage.%s", accessBoundary.bucketName()));
        Token token = tokenProvider.getToken(accessBoundary.objectName());
        if (token == null) {
            return null;
        }
        // Fix: tokens without an expiration (e.g. the static fallback token created
        // via Token.createToken(value, null)) previously caused an NPE in
        // Duration.between(Instant.now(), null).
        Instant expiration = token.getExpirationTime();
        Long expirationMillis = expiration == null
            ? null
            : Duration.between(Instant.now(), expiration).toMillis();
        // NOTE(review): mirrors the original code in passing the remaining lifetime
        // in millis — confirm AccessToken does not expect epoch millis instead.
        return new AccessToken(token.getToken(), expirationMillis);
    }

    @Override
    public AccessTokenType getAccessTokenType() {
        return AccessTokenType.DOWNSCOPED;
    }

    @Override
    public void refresh() {
        // Tokens are re-read from the Hadoop configuration on each request; nothing to refresh.
    }

    @Override
    public void setConf(Configuration conf) {
        this.configuration = conf;
    }

    @Override
    public Configuration getConf() {
        return configuration;
    }
}
|
0
|
java-sources/ai/h2o/google-analytics-java/1.1.2-H2O-CUSTOM/com/brsanthu
|
java-sources/ai/h2o/google-analytics-java/1.1.2-H2O-CUSTOM/com/brsanthu/googleanalytics/AwtRequestParameterDiscoverer.java
|
package com.brsanthu.googleanalytics;
import static com.brsanthu.googleanalytics.GaUtils.isEmpty;
import java.awt.Dimension;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.Toolkit;
/**
 * This class uses AWT classes to discover the following properties.
* <ul>
* <li>Screen Resolution</li>
* <li>Screen Colors</li>
* </ul>
*
* @author Santhosh Kumar
*
* This copy of google-analytics-java is a back port of version 1.1.2 of the library.
* This backport removes the slf4j dependency, and modifies the code to work with the
* 4.1 version of the Apache http client library.
*
* Original sources can be found at https://github.com/brsanthu/google-analytics-java.
* All copyrights retained by original authors.
*/
public class AwtRequestParameterDiscoverer extends DefaultRequestParameterDiscoverer {

    /**
     * Fills in screen resolution and screen color depth via AWT when the request
     * does not already carry them, after the superclass discovery has run.
     */
    @Override
    public DefaultRequest discoverParameters(GoogleAnalyticsConfig config, DefaultRequest request) {
        super.discoverParameters(config, request);
        Toolkit toolkit = Toolkit.getDefaultToolkit();

        if (isEmpty(request.screenResolution())) {
            Dimension size = toolkit.getScreenSize();
            request.screenResolution(
                ((int) size.getWidth()) + "x" + ((int) size.getHeight())
                    + ", " + toolkit.getScreenResolution() + " dpi");
        }

        if (isEmpty(request.screenColors())) {
            // One bit-depth entry per attached graphics device, comma separated.
            StringBuilder colors = new StringBuilder();
            GraphicsDevice[] devices =
                GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
            for (GraphicsDevice device : devices) {
                if (colors.length() != 0) {
                    colors.append(", ");
                }
                colors.append(device.getDisplayMode().getBitDepth());
            }
            request.screenColors(colors.toString());
        }
        return request;
    }
}
|
0
|
java-sources/ai/h2o/google-analytics-java/1.1.2-H2O-CUSTOM/com/brsanthu
|
java-sources/ai/h2o/google-analytics-java/1.1.2-H2O-CUSTOM/com/brsanthu/googleanalytics/DefaultRequest.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.brsanthu.googleanalytics;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.CURRENCY_CODE;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.DNS_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EVENT_ACTION;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EVENT_CATEGORY;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EVENT_LABEL;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EVENT_VALUE;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EXCEPTION_DESCRIPTION;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.EXCEPTION_FATAL;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.ITEM_CATEGORY;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.ITEM_CODE;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.ITEM_NAME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.ITEM_PRICE;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.ITEM_QUANTITY;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.PAGE_DOWNLOAD_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.PAGE_LOAD_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.REDIRECT_RESPONSE_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.SERVER_RESPONSE_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.SOCIAL_ACTION;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.SOCIAL_ACTION_TARGET;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.SOCIAL_NETWORK;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TCP_CONNECT_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TRANSACTION_AFFILIATION;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TRANSACTION_ID;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TRANSACTION_REVENUE;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TRANSACTION_SHIPPING;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.TRANSACTION_TAX;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.USER_TIMING_CATEGORY;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.USER_TIMING_LABEL;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.USER_TIMING_TIME;
import static com.brsanthu.googleanalytics.GoogleAnalyticsParameter.USER_TIMING_VARIABLE_NAME;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.UUID;
/**
* Default request that captures default value for any of the parameters. Create an instance of
* this object and specify as constructor parameter to {@link GoogleAnalytics} or set one any time using
* {@link GoogleAnalytics#setDefaultRequest(DefaultRequest)} method.
*
* @author Santhosh Kumar
*
* This copy of google-analytics-java is a back port of version 1.1.2 of the library.
* This backport removes the slf4j dependency, and modifies the code to work with the
* 4.1 version of the Apache http client library.
*
* Original sources can be found at https://github.com/brsanthu/google-analytics-java.
* All copyrights retained by original authors.
*/
public class DefaultRequest extends GoogleAnalyticsRequest<DefaultRequest> {

    public DefaultRequest() {
        this(null, null, null, null);
    }

    public DefaultRequest(String hitType) {
        this(hitType, null, null, null);
    }

    /**
     * Builds a default request, falling back to the {@code pageview} hit type and
     * deriving a stable client id by hashing the local host address plus the
     * current user name into a UUID.
     */
    public DefaultRequest(String hitType, String trackingId, String appName, String appVersion) {
        hitType(isEmpty(hitType) ? "pageview" : hitType);
        trackingId(trackingId);
        applicationName(appName);
        applicationVersion(appVersion);
        String clientId;
        try {
            // NOTE(review): getBytes() uses the platform default charset; changing it
            // would alter existing client ids, so it is intentionally left as-is.
            clientId = UUID.nameUUIDFromBytes(
                (InetAddress.getLocalHost().getHostAddress() + System.getProperty("user.name"))
                    .getBytes()).toString();
        } catch (UnknownHostException e) {
            // Host lookup failed: hash the user name alone.
            clientId = UUID.nameUUIDFromBytes(System.getProperty("user.name").getBytes()).toString();
        }
        clientId(clientId);
    }
/**
 * Sets the event category ({@code ec}; text, max 150 bytes; event hits).
 * Optional, but must not be empty when supplied. Example: {@code ec=Category}.
 */
public DefaultRequest eventCategory(String value) {
    setString(EVENT_CATEGORY, value);
    return this;
}

/** Returns the event category ({@code ec}). */
public String eventCategory() {
    return getString(EVENT_CATEGORY);
}

/**
 * Sets the event action ({@code ea}; text, max 500 bytes; event hits).
 * Optional, but must not be empty when supplied. Example: {@code ea=Action}.
 */
public DefaultRequest eventAction(String value) {
    setString(EVENT_ACTION, value);
    return this;
}

/** Returns the event action ({@code ea}). */
public String eventAction() {
    return getString(EVENT_ACTION);
}

/**
 * Sets the event label ({@code el}; text, max 500 bytes; event hits).
 * Optional. Example: {@code el=Label}.
 */
public DefaultRequest eventLabel(String value) {
    setString(EVENT_LABEL, value);
    return this;
}

/** Returns the event label ({@code el}). */
public String eventLabel() {
    return getString(EVENT_LABEL);
}

/**
 * Sets the event value ({@code ev}; integer; event hits).
 * Optional; values must be non-negative. Example: {@code ev=55}.
 */
public DefaultRequest eventValue(Integer value) {
    setInteger(EVENT_VALUE, value);
    return this;
}

/** Returns the event value ({@code ev}). */
public Integer eventValue() {
    return getInteger(EVENT_VALUE);
}
/**
 * Sets the exception description ({@code exd}; text, max 150 bytes; exception hits).
 * Optional. Example: {@code exd=DatabaseError}.
 */
public DefaultRequest exceptionDescription(String value) {
    setString(EXCEPTION_DESCRIPTION, value);
    return this;
}

/** Returns the exception description ({@code exd}). */
public String exceptionDescription() {
    return getString(EXCEPTION_DESCRIPTION);
}

/**
 * Sets whether the exception was fatal ({@code exf}; boolean, server default 1;
 * exception hits). Optional. Example: {@code exf=0}.
 */
public DefaultRequest exceptionFatal(Boolean value) {
    setBoolean(EXCEPTION_FATAL, value);
    return this;
}

/** Returns the exception-fatal flag ({@code exf}). */
public Boolean exceptionFatal() {
    return getBoolean(EXCEPTION_FATAL);
}
/**
 * Sets the item name ({@code in}; text, max 500 bytes; item hits).
 * Required for the item hit type. Example: {@code in=Shoe}.
 */
public DefaultRequest itemName(String value) {
    setString(ITEM_NAME, value);
    return this;
}

/** Returns the item name ({@code in}). */
public String itemName() {
    return getString(ITEM_NAME);
}

/**
 * Sets the price for a single item/unit ({@code ip}; currency, server default 0;
 * item hits). Optional. Example: {@code ip=3.50}.
 */
public DefaultRequest itemPrice(Double value) {
    setDouble(ITEM_PRICE, value);
    return this;
}

/** Returns the item unit price ({@code ip}). */
public Double itemPrice() {
    return getDouble(ITEM_PRICE);
}

/**
 * Sets the number of items purchased ({@code iq}; integer, server default 0;
 * item hits). Optional. Example: {@code iq=4}.
 */
public DefaultRequest itemQuantity(Integer value) {
    setInteger(ITEM_QUANTITY, value);
    return this;
}

/** Returns the item quantity ({@code iq}). */
public Integer itemQuantity() {
    return getInteger(ITEM_QUANTITY);
}

/**
 * Sets the SKU or item code ({@code ic}; text, max 500 bytes; item hits).
 * Optional. Example: {@code ic=SKU47}.
 */
public DefaultRequest itemCode(String value) {
    setString(ITEM_CODE, value);
    return this;
}

/** Returns the SKU/item code ({@code ic}). */
public String itemCode() {
    return getString(ITEM_CODE);
}

/**
 * Sets the category the item belongs to ({@code iv}; text, max 500 bytes;
 * item hits). Optional. Example: {@code iv=Blue}.
 */
public DefaultRequest itemCategory(String value) {
    setString(ITEM_CATEGORY, value);
    return this;
}

/** Returns the item category ({@code iv}). */
public String itemCategory() {
    return getString(ITEM_CATEGORY);
}
/**
 * Sets the local currency for all transaction currency values ({@code cu}; text,
 * max 10 bytes; transaction and item hits). Optional; must be a valid ISO 4217
 * currency code. Example: {@code cu=EUR}.
 */
public DefaultRequest currencyCode(String value) {
    setString(CURRENCY_CODE, value);
    return this;
}

/** Returns the currency code ({@code cu}). */
public String currencyCode() {
    return getString(CURRENCY_CODE);
}
/**
 * Sets the social network, e.g. Facebook or Google Plus ({@code sn}; text,
 * max 50 bytes; social hits). Required for the social hit type.
 * Example: {@code sn=facebook}.
 */
public DefaultRequest socialNetwork(String value) {
    setString(SOCIAL_NETWORK, value);
    return this;
}

/** Returns the social network ({@code sn}). */
public String socialNetwork() {
    return getString(SOCIAL_NETWORK);
}

/**
 * Sets the social interaction action, e.g. 'plus' for a Google Plus +1
 * ({@code sa}; text, max 50 bytes; social hits). Required for the social hit
 * type. Example: {@code sa=like}.
 */
public DefaultRequest socialAction(String value) {
    setString(SOCIAL_ACTION, value);
    return this;
}

/** Returns the social action ({@code sa}). */
public String socialAction() {
    return getString(SOCIAL_ACTION);
}

/**
 * Sets the target of a social interaction, typically a URL but any text is
 * accepted ({@code st}; text, max 2048 bytes; social hits). Required for the
 * social hit type. Example: {@code st=http%3A%2F%2Ffoo.com}.
 */
public DefaultRequest socialActionTarget(String value) {
    setString(SOCIAL_ACTION_TARGET, value);
    return this;
}

/** Returns the social action target ({@code st}). */
public String socialActionTarget() {
    return getString(SOCIAL_ACTION_TARGET);
}
/**
 * Sets the user timing category ({@code utc}; text, max 150 bytes; timing hits).
 * Optional. Example: {@code utc=category}.
 */
public DefaultRequest userTimingCategory(String value) {
    setString(USER_TIMING_CATEGORY, value);
    return this;
}

/** Returns the user timing category ({@code utc}). */
public String userTimingCategory() {
    return getString(USER_TIMING_CATEGORY);
}

/**
 * Sets the user timing variable ({@code utv}; text, max 500 bytes; timing hits).
 * Optional. Example: {@code utv=lookup}.
 */
public DefaultRequest userTimingVariableName(String value) {
    setString(USER_TIMING_VARIABLE_NAME, value);
    return this;
}

/** Returns the user timing variable ({@code utv}). */
public String userTimingVariableName() {
    return getString(USER_TIMING_VARIABLE_NAME);
}

/**
 * Sets the user timing value in milliseconds ({@code utt}; integer; timing hits).
 * Optional. Example: {@code utt=123}.
 */
public DefaultRequest userTimingTime(Integer value) {
    setInteger(USER_TIMING_TIME, value);
    return this;
}

/** Returns the user timing value in milliseconds ({@code utt}). */
public Integer userTimingTime() {
    return getInteger(USER_TIMING_TIME);
}

/**
 * Sets the user timing label ({@code utl}; text, max 500 bytes; timing hits).
 * Optional. Example: {@code utl=label}.
 */
public DefaultRequest userTimingLabel(String value) {
    setString(USER_TIMING_LABEL, value);
    return this;
}

/** Returns the user timing label ({@code utl}). */
public String userTimingLabel() {
    return getString(USER_TIMING_LABEL);
}
/**
 * Sets the time it took for a page to load, in milliseconds ({@code plt};
 * integer; timing hits). Optional. Example: {@code plt=3554}.
 */
public DefaultRequest pageLoadTime(Integer value) {
    setInteger(PAGE_LOAD_TIME, value);
    return this;
}

/** Returns the page load time in milliseconds ({@code plt}). */
public Integer pageLoadTime() {
    return getInteger(PAGE_LOAD_TIME);
}

/**
 * Sets the time it took to do a DNS lookup, in milliseconds ({@code dns};
 * integer; timing hits). Optional. Example: {@code dns=43}.
 */
public DefaultRequest dnsTime(Integer value) {
    setInteger(DNS_TIME, value);
    return this;
}

/** Returns the DNS lookup time in milliseconds ({@code dns}). */
public Integer dnsTime() {
    return getInteger(DNS_TIME);
}

/**
 * Sets the time it took for the page to be downloaded, in milliseconds
 * ({@code pdt}; integer; timing hits). Optional. Example: {@code pdt=500}.
 */
public DefaultRequest pageDownloadTime(Integer value) {
    setInteger(PAGE_DOWNLOAD_TIME, value);
    return this;
}

/** Returns the page download time in milliseconds ({@code pdt}). */
public Integer pageDownloadTime() {
    return getInteger(PAGE_DOWNLOAD_TIME);
}

/**
 * Sets the time it took for any redirects to happen, in milliseconds
 * ({@code rrt}; integer; timing hits). Optional. Example: {@code rrt=500}.
 */
public DefaultRequest redirectResponseTime(Integer value) {
    setInteger(REDIRECT_RESPONSE_TIME, value);
    return this;
}

/** Returns the redirect response time in milliseconds ({@code rrt}). */
public Integer redirectResponseTime() {
    return getInteger(REDIRECT_RESPONSE_TIME);
}

/**
 * Sets the time it took for a TCP connection to be made, in milliseconds
 * ({@code tcp}; integer; timing hits). Optional. Example: {@code tcp=500}.
 */
public DefaultRequest tcpConnectTime(Integer value) {
    setInteger(TCP_CONNECT_TIME, value);
    return this;
}

/** Returns the TCP connect time in milliseconds ({@code tcp}). */
public Integer tcpConnectTime() {
    return getInteger(TCP_CONNECT_TIME);
}

/**
 * Sets the time it took for the server to respond after the connect time, in
 * milliseconds ({@code srt}; integer; timing hits). Optional.
 * Example: {@code srt=500}.
 */
public DefaultRequest serverResponseTime(Integer value) {
    setInteger(SERVER_RESPONSE_TIME, value);
    return this;
}

/** Returns the server response time in milliseconds ({@code srt}). */
public Integer serverResponseTime() {
    return getInteger(SERVER_RESPONSE_TIME);
}
/**
 * Sets the unique transaction identifier ({@code ti}; text, max 500 bytes;
 * transaction and item hits). Required for both the transaction and item hit
 * types; must match between a transaction hit and its associated item hits.
 * Example: {@code ti=OD564}.
 */
public DefaultRequest txId(String value) {
    setString(TRANSACTION_ID, value);
    return this;
}

/** Returns the transaction id ({@code ti}). */
public String txId() {
    return getString(TRANSACTION_ID);
}

/**
 * Sets the affiliation or store name ({@code ta}; text, max 500 bytes;
 * transaction hits). Optional. Example: {@code ta=Member}.
 */
public DefaultRequest txAffiliation(String value) {
    setString(TRANSACTION_AFFILIATION, value);
    return this;
}

/** Returns the transaction affiliation ({@code ta}). */
public String txAffiliation() {
    return getString(TRANSACTION_AFFILIATION);
}

/**
 * Sets the total revenue of the transaction, including shipping and tax
 * ({@code tr}; currency, server default 0; transaction hits). Optional.
 * Example: {@code tr=15.47}.
 */
public DefaultRequest txRevenue(Double value) {
    setDouble(TRANSACTION_REVENUE, value);
    return this;
}

/** Returns the transaction revenue ({@code tr}). */
public Double txRevenue() {
    return getDouble(TRANSACTION_REVENUE);
}

/**
 * Sets the total shipping cost of the transaction ({@code ts}; currency, server
 * default 0; transaction hits). Optional. Example: {@code ts=3.50}.
 */
public DefaultRequest txShipping(Double value) {
    setDouble(TRANSACTION_SHIPPING, value);
    return this;
}

/** Returns the transaction shipping cost ({@code ts}). */
public Double txShipping() {
    return getDouble(TRANSACTION_SHIPPING);
}

/**
 * Sets the total tax of the transaction ({@code tt}; currency, server default 0;
 * transaction hits). Optional. Example: {@code tt=11.20}.
 */
public DefaultRequest txTax(Double value) {
    setDouble(TRANSACTION_TAX, value);
    return this;
}

/** Returns the transaction tax ({@code tt}). */
public Double txTax() {
    return getDouble(TRANSACTION_TAX);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.